mindspore 2.3.0__cp39-none-any.whl → 2.3.0rc2__cp39-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only and reflects the changes between the two package versions.

Potentially problematic release: this version of mindspore might be problematic.

Files changed (423)
  1. mindspore/.commit_id +1 -1
  2. mindspore/Third_Party_Open_Source_Software_Notice +0 -1512
  3. mindspore/__init__.py +1 -2
  4. mindspore/_c_dataengine.cpython-39-aarch64-linux-gnu.so +0 -0
  5. mindspore/_c_expression.cpython-39-aarch64-linux-gnu.so +0 -0
  6. mindspore/_c_mindrecord.cpython-39-aarch64-linux-gnu.so +0 -0
  7. mindspore/_checkparam.py +25 -5
  8. mindspore/_extends/graph_kernel/model/graph_parallel.py +1 -1
  9. mindspore/_extends/parse/__init__.py +2 -2
  10. mindspore/_extends/parse/compile_config.py +0 -29
  11. mindspore/_extends/parse/namespace.py +2 -2
  12. mindspore/_extends/parse/parser.py +5 -21
  13. mindspore/_extends/parse/resources.py +7 -5
  14. mindspore/_extends/parse/standard_method.py +59 -40
  15. mindspore/_mindspore_offline_debug.cpython-39-aarch64-linux-gnu.so +0 -0
  16. mindspore/amp.py +5 -26
  17. mindspore/bin/cache_admin +0 -0
  18. mindspore/bin/cache_server +0 -0
  19. mindspore/boost/adasum.py +1 -1
  20. mindspore/boost/base.py +1 -1
  21. mindspore/boost/boost_cell_wrapper.py +1 -1
  22. mindspore/boost/grad_freeze.py +2 -2
  23. mindspore/boost/less_batch_normalization.py +6 -9
  24. mindspore/common/__init__.py +1 -8
  25. mindspore/common/_register_for_tensor.py +9 -8
  26. mindspore/common/api.py +65 -275
  27. mindspore/common/dtype.py +4 -8
  28. mindspore/common/dump.py +5 -2
  29. mindspore/common/jit_config.py +1 -1
  30. mindspore/common/lazy_inline.py +2 -14
  31. mindspore/common/parameter.py +15 -14
  32. mindspore/common/recompute.py +5 -20
  33. mindspore/common/sparse_tensor.py +6 -21
  34. mindspore/common/tensor.py +52 -100
  35. mindspore/communication/__init__.py +11 -6
  36. mindspore/communication/management.py +94 -92
  37. mindspore/context.py +18 -180
  38. mindspore/dataset/engine/datasets.py +46 -69
  39. mindspore/dataset/engine/datasets_user_defined.py +53 -72
  40. mindspore/dataset/engine/datasets_vision.py +2 -2
  41. mindspore/dataset/engine/queue.py +38 -56
  42. mindspore/dataset/engine/validators.py +5 -11
  43. mindspore/dataset/vision/__init__.py +5 -5
  44. mindspore/dataset/vision/c_transforms.py +5 -5
  45. mindspore/dataset/vision/py_transforms_util.py +1 -1
  46. mindspore/dataset/vision/transforms.py +46 -591
  47. mindspore/dataset/vision/utils.py +1 -121
  48. mindspore/dataset/vision/validators.py +3 -9
  49. mindspore/hal/__init__.py +1 -7
  50. mindspore/hal/device.py +1 -1
  51. mindspore/include/api/model.h +0 -3
  52. mindspore/include/dataset/vision.h +2 -54
  53. mindspore/include/mindapi/base/types.h +0 -1
  54. mindspore/lib/libdnnl.so.2 +0 -0
  55. mindspore/lib/libmindspore.so +0 -0
  56. mindspore/lib/libmindspore_backend.so +0 -0
  57. mindspore/lib/libmindspore_common.so +0 -0
  58. mindspore/lib/libmindspore_core.so +0 -0
  59. mindspore/lib/libmindspore_glog.so.0 +0 -0
  60. mindspore/lib/libmindspore_gpr.so.15 +0 -0
  61. mindspore/lib/libmindspore_grpc++.so.1 +0 -0
  62. mindspore/lib/libmindspore_grpc.so.15 +0 -0
  63. mindspore/lib/libmindspore_shared_lib.so +0 -0
  64. mindspore/lib/libmpi_adapter.so +0 -0
  65. mindspore/lib/libmpi_collective.so +0 -0
  66. mindspore/lib/libnnacl.so +0 -0
  67. mindspore/lib/libopencv_core.so.4.5 +0 -0
  68. mindspore/lib/libps_cache.so +0 -0
  69. mindspore/lib/plugin/ascend/custom_aicore_ops/op_impl/ai_core/tbe/config/ascend310p/aic-ascend310p-ops-info.json +0 -35
  70. mindspore/lib/plugin/ascend/custom_aicore_ops/op_impl/ai_core/tbe/custom_aicore_ops_impl/kv_cache_mgr.py +0 -2
  71. mindspore/lib/plugin/ascend/custom_aicore_ops/op_impl/vector_core/tbe/custom_aicore_ops_impl/kv_cache_mgr.py +0 -2
  72. mindspore/lib/plugin/ascend/custom_aicpu_ops/op_impl/cpu/aicpu_kernel/impl/libcust_cpu_kernels.so +0 -0
  73. mindspore/lib/plugin/ascend/custom_aicpu_ops/op_impl/cpu/config/cust_aicpu_kernel.json +0 -72
  74. mindspore/lib/plugin/ascend/custom_aicpu_ops/op_proto/libcust_op_proto.so +0 -0
  75. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_api/include/{aclnn_all_finite.h → aclnn_add_custom.h} +11 -9
  76. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_api/include/aclnn_decoder_kv_cache.h +1 -1
  77. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_api/include/aclnn_prompt_kv_cache.h +1 -1
  78. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_api/lib/libcust_opapi.so +0 -0
  79. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/config/ascend310p/aic-ascend310p-ops-info.json +12 -184
  80. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/config/ascend910/aic-ascend910-ops-info.json +15 -7
  81. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/config/ascend910b/aic-ascend910b-ops-info.json +15 -7
  82. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl/dynamic/add_custom.cpp +81 -0
  83. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl/dynamic/add_custom.py +134 -0
  84. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl/dynamic/decoder_kv_cache.py +31 -77
  85. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl/dynamic/prompt_kv_cache.py +31 -77
  86. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/op_tiling/lib/linux/aarch64/libcust_opmaster_rt2.0.so +0 -0
  87. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/op_tiling/liboptiling.so +0 -0
  88. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_proto/inc/op_proto.h +5 -4
  89. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_proto/lib/linux/aarch64/libcust_opsproto_rt2.0.so +0 -0
  90. mindspore/lib/plugin/ascend/libascend_collective.so +0 -0
  91. mindspore/lib/plugin/ascend/libdvpp_utils.so +0 -0
  92. mindspore/lib/plugin/ascend/libhccl_plugin.so +0 -0
  93. mindspore/lib/plugin/ascend/liblowlatency_collective.so +0 -0
  94. mindspore/lib/plugin/ascend/libmindspore_cpu_kernels.so +0 -0
  95. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/bin/DeviceBin +0 -0
  96. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/bin/PkgInspect +0 -0
  97. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/bin/op_man +0 -0
  98. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/device/ascend910b/bin/ascend910b.bin +286 -275
  99. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/host/libasdops_cann_host.so +0 -0
  100. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/host/libasdops_host.so +0 -0
  101. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/lib/libasdops.so +0 -0
  102. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/lib/libasdops_static.a +0 -0
  103. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/add/add_impl.h +0 -1
  104. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/apply_rotary_pos_emb_impl.h +0 -1
  105. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/asdop/asd_op_impl.h +0 -3
  106. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/backend_param.h +0 -5
  107. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/cast/cast_tiling.h +45 -1
  108. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/compare/compare_impl.h +0 -1
  109. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/flash_attention_score/flash_attention_score_impl.h +4 -8
  110. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/flash_attention_score/flash_attention_score_tiling.h +4 -11
  111. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/flash_attention_score/kernel/flash_attention_score_mix_hwsync.h +0 -18
  112. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/internal_kernel.h +0 -6
  113. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/internal_rtbackend.h +75 -1
  114. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul/kernel/matmul.h +5 -5
  115. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul/matmul_impl.h +3 -18
  116. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul_common/pp_matmul_common_tiling.h +5 -5
  117. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul_common/pp_matmul_info.h +2 -2
  118. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul_common/tiling_data.h +3 -36
  119. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul_stridedslice/kernel/matmul_stridedslice_fusion.h +2 -2
  120. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul_stridedslice/matmul_stridedslice_fusion_impl.h +4 -22
  121. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/op_param.h +2 -16
  122. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/paged_attention/kernel/paged_attention_mix_hwsync.h +3 -1
  123. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/paged_attention/paged_attention_impl.h +4 -5
  124. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/paged_attention/paged_attention_tiling.h +4 -9
  125. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/attention_param.h +2 -5
  126. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/matmul_ext_param.h +0 -1
  127. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/matmul_qkv_param.h +4 -10
  128. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/sub_param.h +12 -0
  129. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/rms_norm/rms_norm_impl.h +0 -1
  130. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/sub/sub_impl.h +0 -1
  131. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/tune_repo/matmul_table.h +1 -1
  132. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/backend.h +2 -10
  133. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/elewise_utils.h +1 -5
  134. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log.h +0 -1
  135. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log_tiling.h +0 -17
  136. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/math.h +7 -2
  137. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libAdd_impl.so +0 -0
  138. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libSub_impl.so +0 -0
  139. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libadd_layernorm_impl.so +0 -0
  140. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libadd_rms_norm_impl.so +0 -0
  141. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libapply_rotary_pos_emb_impl.so +0 -0
  142. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libcast_impl.so +0 -0
  143. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libgelu_impl.so +0 -0
  144. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libmatmul_impl.so +0 -0
  145. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libmatmul_stridedslice_fusion_impl.so +0 -0
  146. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libms_kernels_internal.so +0 -0
  147. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libnot_equal_impl.so +0 -0
  148. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libreshape_and_cache_impl.so +0 -0
  149. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/librms_norm_impl.so +0 -0
  150. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_bf16_bnsd_full_mix.o +0 -0
  151. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_bf16_bnsd_tri_mix.o +0 -0
  152. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_bf16_bsh_full_mix.o +0 -0
  153. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_bf16_bsh_tri_mix.o +0 -0
  154. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_fp16_bnsd_full_mix.o +0 -0
  155. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_fp16_bnsd_tri_mix.o +0 -0
  156. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_fp16_bsh_full_mix.o +0 -0
  157. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_fp16_bsh_tri_mix.o +0 -0
  158. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/paged_attention_bf16_bnsd_full_mix.o +0 -0
  159. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/paged_attention_bf16_bsh_full_mix.o +0 -0
  160. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/paged_attention_fp16_bnsd_full_mix.o +0 -0
  161. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/paged_attention_fp16_bsh_full_mix.o +0 -0
  162. mindspore/lib/plugin/ascend/ms_kernels_internal/lccl/lib/liblcal.so +0 -0
  163. mindspore/lib/plugin/ascend/ms_kernels_internal/lccl/lib/liblccl_wrapper.so +0 -0
  164. mindspore/lib/plugin/libmindspore_ascend.so.2 +0 -0
  165. mindspore/mindrecord/filewriter.py +2 -2
  166. mindspore/mint/__init__.py +40 -720
  167. mindspore/mint/nn/__init__.py +7 -89
  168. mindspore/mint/nn/functional.py +16 -165
  169. mindspore/mint/optim/adamw.py +16 -15
  170. mindspore/nn/__init__.py +2 -0
  171. mindspore/nn/cell.py +98 -97
  172. mindspore/nn/extend/basic.py +2 -2
  173. mindspore/nn/extend/embedding.py +1 -1
  174. mindspore/nn/extend/layer/normalization.py +5 -7
  175. mindspore/nn/generator.py +297 -0
  176. mindspore/nn/layer/activation.py +3 -4
  177. mindspore/nn/layer/basic.py +16 -79
  178. mindspore/nn/layer/conv.py +8 -17
  179. mindspore/nn/layer/embedding.py +4 -1
  180. mindspore/nn/layer/math.py +1 -1
  181. mindspore/nn/layer/normalization.py +1 -1
  182. mindspore/nn/layer/pooling.py +0 -5
  183. mindspore/nn/layer/rnn_cells.py +2 -2
  184. mindspore/nn/loss/loss.py +19 -19
  185. mindspore/nn/optim/adasum.py +1 -1
  186. mindspore/nn/optim/sgd.py +2 -3
  187. mindspore/nn/probability/distribution/exponential.py +1 -1
  188. mindspore/nn/probability/distribution/geometric.py +1 -1
  189. mindspore/nn/probability/distribution/logistic.py +1 -1
  190. mindspore/nn/wrap/cell_wrapper.py +1 -25
  191. mindspore/nn/wrap/loss_scale.py +1 -24
  192. mindspore/numpy/array_ops.py +1 -5
  193. mindspore/numpy/dtypes.py +3 -3
  194. mindspore/numpy/math_ops.py +8 -8
  195. mindspore/ops/__init__.py +1 -1
  196. mindspore/ops/_grad_experimental/grad_comm_ops.py +16 -75
  197. mindspore/ops/_vmap/vmap_array_ops.py +0 -27
  198. mindspore/ops/_vmap/vmap_math_ops.py +1 -29
  199. mindspore/ops/_vmap/vmap_nn_ops.py +18 -19
  200. mindspore/ops/auto_generate/cpp_create_prim_instance_helper.py +8 -34
  201. mindspore/ops/auto_generate/gen_arg_dtype_cast.py +9 -2
  202. mindspore/ops/auto_generate/gen_arg_handler.py +0 -26
  203. mindspore/ops/auto_generate/gen_extend_func.py +27 -603
  204. mindspore/ops/auto_generate/gen_ops_def.py +203 -993
  205. mindspore/ops/auto_generate/gen_ops_prim.py +402 -1946
  206. mindspore/ops/auto_generate/pyboost_inner_prim.py +20 -90
  207. mindspore/ops/composite/base.py +6 -3
  208. mindspore/ops/composite/math_ops.py +1 -1
  209. mindspore/ops/composite/multitype_ops/_compile_utils.py +17 -24
  210. mindspore/ops/composite/multitype_ops/_constexpr_utils.py +1 -1
  211. mindspore/ops/extend/__init__.py +3 -2
  212. mindspore/ops/extend/array_func.py +51 -10
  213. mindspore/ops/extend/nn_func.py +78 -2
  214. mindspore/ops/function/__init__.py +13 -8
  215. mindspore/ops/function/array_func.py +179 -455
  216. mindspore/ops/function/clip_func.py +1 -1
  217. mindspore/ops/function/grad/grad_func.py +3 -3
  218. mindspore/ops/function/math_func.py +103 -117
  219. mindspore/ops/function/nn_func.py +163 -275
  220. mindspore/ops/function/other_func.py +2 -2
  221. mindspore/ops/function/random_func.py +69 -202
  222. mindspore/ops/function/sparse_func.py +4 -4
  223. mindspore/ops/functional.py +327 -332
  224. mindspore/ops/operations/__init__.py +3 -13
  225. mindspore/ops/operations/_grad_ops.py +27 -3
  226. mindspore/ops/operations/_inner_ops.py +356 -53
  227. mindspore/ops/operations/_rl_inner_ops.py +2 -2
  228. mindspore/ops/operations/_tensor_array.py +8 -8
  229. mindspore/ops/operations/array_ops.py +65 -82
  230. mindspore/ops/operations/comm_ops.py +93 -784
  231. mindspore/ops/operations/custom_ops.py +28 -51
  232. mindspore/ops/operations/debug_ops.py +4 -4
  233. mindspore/ops/operations/inner_ops.py +2 -2
  234. mindspore/ops/operations/manually_defined/ops_def.py +4 -304
  235. mindspore/ops/operations/math_ops.py +50 -3
  236. mindspore/ops/operations/nn_ops.py +247 -14
  237. mindspore/ops/operations/other_ops.py +3 -3
  238. mindspore/ops/operations/random_ops.py +1 -1
  239. mindspore/ops/operations/sparse_ops.py +1 -1
  240. mindspore/ops/primitive.py +8 -9
  241. mindspore/ops/silent_check.py +5 -5
  242. mindspore/ops_generate/arg_dtype_cast.py +9 -2
  243. mindspore/ops_generate/arg_handler.py +0 -26
  244. mindspore/ops_generate/gen_aclnn_implement.py +4 -1
  245. mindspore/ops_generate/gen_ops.py +4 -26
  246. mindspore/ops_generate/gen_pyboost_func.py +12 -41
  247. mindspore/ops_generate/gen_utils.py +0 -21
  248. mindspore/ops_generate/pyboost_utils.py +2 -7
  249. mindspore/ops_generate/template.py +0 -1
  250. mindspore/parallel/_auto_parallel_context.py +1 -21
  251. mindspore/parallel/_tensor.py +5 -0
  252. mindspore/parallel/_transformer/transformer.py +1 -1
  253. mindspore/parallel/_utils.py +1 -15
  254. mindspore/parallel/algo_parameter_config.py +3 -1
  255. mindspore/parallel/checkpoint_transform.py +9 -12
  256. mindspore/parallel/cluster/process_entity/_api.py +29 -28
  257. mindspore/parallel/cluster/process_entity/_utils.py +3 -13
  258. mindspore/parallel/cluster/run.py +16 -13
  259. mindspore/parallel/parameter_broadcast.py +2 -2
  260. mindspore/parallel/shard.py +17 -31
  261. mindspore/profiler/__init__.py +2 -3
  262. mindspore/profiler/common/util.py +2 -107
  263. mindspore/profiler/envprofiling.py +1 -1
  264. mindspore/profiler/parser/ascend_analysis/constant.py +21 -8
  265. mindspore/profiler/parser/ascend_analysis/file_manager.py +0 -82
  266. mindspore/profiler/parser/ascend_analysis/function_event.py +28 -43
  267. mindspore/profiler/parser/ascend_analysis/fwk_cann_parser.py +27 -49
  268. mindspore/profiler/parser/ascend_analysis/fwk_file_parser.py +10 -15
  269. mindspore/profiler/parser/ascend_analysis/msprof_timeline_parser.py +20 -25
  270. mindspore/profiler/parser/ascend_analysis/profiler_info_parser.py +5 -5
  271. mindspore/profiler/parser/ascend_analysis/trace_event_manager.py +1 -10
  272. mindspore/profiler/parser/ascend_hccl_generator.py +1 -4
  273. mindspore/profiler/parser/ascend_msprof_exporter.py +22 -43
  274. mindspore/profiler/parser/ascend_timeline_generator.py +5 -7
  275. mindspore/profiler/parser/minddata_parser.py +3 -72
  276. mindspore/profiler/profiling.py +59 -176
  277. mindspore/rewrite/api/node.py +1 -1
  278. mindspore/rewrite/common/namespace.py +5 -5
  279. mindspore/rewrite/parsers/assign_parser.py +0 -2
  280. mindspore/rewrite/parsers/class_def_parser.py +4 -8
  281. mindspore/run_check/_check_version.py +1 -1
  282. mindspore/scipy/fft.py +3 -1
  283. mindspore/scipy/linalg.py +3 -2
  284. mindspore/scipy/ops.py +3 -5
  285. mindspore/scipy/optimize/__init__.py +2 -2
  286. mindspore/train/__init__.py +4 -4
  287. mindspore/train/anf_ir_pb2.py +2 -8
  288. mindspore/train/callback/__init__.py +2 -5
  289. mindspore/train/callback/_backup_and_restore.py +2 -2
  290. mindspore/train/callback/_checkpoint.py +16 -104
  291. mindspore/train/callback/_landscape.py +1 -1
  292. mindspore/train/callback/_time_monitor.py +1 -1
  293. mindspore/train/data_sink.py +4 -5
  294. mindspore/train/dataset_helper.py +20 -45
  295. mindspore/train/model.py +38 -266
  296. mindspore/train/serialization.py +105 -256
  297. mindspore/train/summary/_summary_adapter.py +1 -1
  298. mindspore/version.py +1 -1
  299. {mindspore-2.3.0.dist-info → mindspore-2.3.0rc2.dist-info}/METADATA +2 -2
  300. {mindspore-2.3.0.dist-info → mindspore-2.3.0rc2.dist-info}/RECORD +303 -420
  301. mindspore/_extends/pijit/__init__.py +0 -23
  302. mindspore/_extends/pijit/pijit_func_white_list.py +0 -343
  303. mindspore/common/file_system.py +0 -48
  304. mindspore/common/generator.py +0 -260
  305. mindspore/common/no_inline.py +0 -54
  306. mindspore/common/np_dtype.py +0 -25
  307. mindspore/communication/comm_func.py +0 -1140
  308. mindspore/hal/memory.py +0 -326
  309. mindspore/lib/libavcodec.so.59 +0 -0
  310. mindspore/lib/libavdevice.so.59 +0 -0
  311. mindspore/lib/libavfilter.so.8 +0 -0
  312. mindspore/lib/libavformat.so.59 +0 -0
  313. mindspore/lib/libavutil.so.57 +0 -0
  314. mindspore/lib/libmindspore_np_dtype.so +0 -0
  315. mindspore/lib/libswresample.so.4 +0 -0
  316. mindspore/lib/libswscale.so.6 +0 -0
  317. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl/dynamic/all_finite.cpp +0 -326
  318. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl/dynamic/all_finite.py +0 -180
  319. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_576ceaeef5870c451cab59af55ea46ad.json +0 -58
  320. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_576ceaeef5870c451cab59af55ea46ad.o +0 -0
  321. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_86a73ff6e28d734c96bb8d3054f7dd18.json +0 -58
  322. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_86a73ff6e28d734c96bb8d3054f7dd18.o +0 -0
  323. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_f55e0ebaad1f2f572e43677336992fa0.json +0 -58
  324. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_f55e0ebaad1f2f572e43677336992fa0.o +0 -0
  325. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/config/ascend910b/all_finite.json +0 -109
  326. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/config/ascend910b/binary_info_config.json +0 -38
  327. mindspore/lib/plugin/ascend/custom_compiler/OWNERS +0 -12
  328. mindspore/lib/plugin/ascend/custom_compiler/setup.py +0 -255
  329. mindspore/lib/plugin/ascend/custom_compiler/start.sh +0 -26
  330. mindspore/lib/plugin/ascend/custom_compiler/template.json +0 -40
  331. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/acme.h +0 -24
  332. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/acme_op.h +0 -69
  333. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/base_type.h +0 -133
  334. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/op_creator.h +0 -32
  335. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/op_param.h +0 -35
  336. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/tiling_info.h +0 -60
  337. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/core/kernel_register.h +0 -37
  338. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/core/platform/platform_configs.h +0 -89
  339. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/core/platform/rt_funcs.h +0 -135
  340. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/add_op.h +0 -34
  341. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/asd_backoff_base.h +0 -62
  342. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/asd_elewise_op.h +0 -33
  343. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/asd_ops.h +0 -88
  344. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/asd_pa_op.h +0 -45
  345. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/cast_op.h +0 -52
  346. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/matmul_op.h +0 -95
  347. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/utils/asd_utils.h +0 -84
  348. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/utils/comm_utils.h +0 -61
  349. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_fp32.h +0 -224
  350. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/and_impl.h +0 -29
  351. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/div_impl.h +0 -29
  352. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/elewise_binary_impl.h +0 -48
  353. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/elewise_binary_tiling.h +0 -25
  354. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/and_kernel.h +0 -46
  355. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/div_kernel.h +0 -46
  356. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/elewise_binary_base.h +0 -260
  357. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/elewise_binary_kernel.h +0 -35
  358. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/max_kernel.h +0 -66
  359. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/min_kernel.h +0 -66
  360. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/mul_kernel.h +0 -66
  361. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/or_kernel.h +0 -46
  362. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/max_impl.h +0 -29
  363. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/min_impl.h +0 -29
  364. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/mul_impl.h +0 -29
  365. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/or_impl.h +0 -29
  366. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/abs_impl.h +0 -29
  367. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/elewise_unary_impl.h +0 -47
  368. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/elewise_unary_tiling.h +0 -24
  369. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/exp_impl.h +0 -29
  370. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/abs_kernel.h +0 -45
  371. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/elewise_unary_base.h +0 -148
  372. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/elewise_unary_kernel.h +0 -31
  373. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/exp_kernel.h +0 -45
  374. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/ln_kernel.h +0 -45
  375. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/not_kernel.h +0 -45
  376. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/reciprocal_kernel.h +0 -45
  377. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/relu_kernel.h +0 -55
  378. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/rsqrt_kernel.h +0 -45
  379. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/sqrt_kernel.h +0 -45
  380. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/ln_impl.h +0 -29
  381. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/not_impl.h +0 -29
  382. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/reciprocal_impl.h +0 -29
  383. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/relu_impl.h +0 -29
  384. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/rsqrt_impl.h +0 -29
  385. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/sqrt_impl.h +0 -29
  386. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/grouped_matmul/grouped_matmul_impl.h +0 -45
  387. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/grouped_matmul/grouped_matmul_tiling.h +0 -187
  388. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/grouped_matmul/kernel/grouped_matmul.h +0 -245
  389. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/grouped_matmul/kernel/grouped_matmul_interface.h +0 -24
  390. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/grouped_matmul/kernel/grouped_matmul_utils.h +0 -111
  391. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/grouped_matmul/tiling_data.h +0 -54
  392. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/compare_param.h +0 -31
  393. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/elewise_param.h +0 -41
  394. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/grouped_matmul_param.h +0 -40
  395. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/profiling_util.h +0 -364
  396. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log_utils.h +0 -69
  397. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/register/kernel_creator.h +0 -39
  398. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/register/kernel_registry.h +0 -114
  399. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/utils.h +0 -98
  400. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MatMulPostFusionMixTactic/matmul_postfusion_mix.json +0 -19
  401. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MatMulPostFusionMixTactic/matmul_postfusion_mix.o +0 -0
  402. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MatMulPostFusionMixTactic/matmul_postfusion_mix_mix_aic_0.o +0 -0
  403. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MatMulPostFusionMixTactic/matmul_postfusion_mix_mix_aiv_0.o +0 -0
  404. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MultiMatMulPostFusionMixTactic/multi_matmul_postfusion_mix.json +0 -19
  405. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MultiMatMulPostFusionMixTactic/multi_matmul_postfusion_mix.o +0 -0
  406. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MultiMatMulPostFusionMixTactic/multi_matmul_postfusion_mix_mix_aic_0.o +0 -0
  407. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MultiMatMulPostFusionMixTactic/multi_matmul_postfusion_mix_mix_aiv_0.o +0 -0
  408. mindspore/mint/linalg/__init__.py +0 -22
  409. mindspore/nn/layer/embedding_service.py +0 -531
  410. mindspore/nn/layer/embedding_service_layer.py +0 -393
  411. mindspore/ops/function/reshard_func.py +0 -102
  412. mindspore/ops/operations/_infer_ops.py +0 -19
  413. mindspore/ops/operations/reshard_ops.py +0 -53
  414. mindspore/profiler/common/process_pool.py +0 -41
  415. mindspore/profiler/common/singleton.py +0 -28
  416. mindspore/profiler/parser/ascend_integrate_generator.py +0 -42
  417. mindspore/profiler/parser/ascend_memory_generator.py +0 -185
  418. mindspore/train/callback/_cluster_monitor.py +0 -201
  419. mindspore/train/callback/_flops_collector.py +0 -238
  420. mindspore/train/callback/_mindio_ttp.py +0 -443
  421. {mindspore-2.3.0.dist-info → mindspore-2.3.0rc2.dist-info}/WHEEL +0 -0
  422. {mindspore-2.3.0.dist-info → mindspore-2.3.0rc2.dist-info}/entry_points.txt +0 -0
  423. {mindspore-2.3.0.dist-info → mindspore-2.3.0rc2.dist-info}/top_level.txt +0 -0
mindspore/common/api.py CHANGED
@@ -52,6 +52,7 @@ from mindspore.common._register_for_adapter import ms_adapter_registry
 from mindspore.common.auto_dynamic_shape import get_auto_dynamic_shape_args, update_auto_dynamic_shape_phase, \
     get_auto_dynamic_shape_args_with_check_input_signature, update_auto_dynamic_shape_phase_with_check_input_signature
 
+
 # Store ms_function class compiled pipeline cache.
 ms_compile_cache = set()
 # Store cell compiled pipeline cache.
@@ -62,67 +63,13 @@ function_phases = dict()
 BROADCAST_PHASE = "_broadcast_"
 _PYNATIVE_PARALLEL_FUNC_NAME = "after_shard"
 
-ARG_SPECIFIED = "arg_specified_infos"
-TOTAL_ARG_LEN = "total_arg_length"
-
-
-def _check_recompile_args(compile_args, kwargs):
-    """Check recompile of graph"""
-
-    def _check_constant_tensor_arg(arg):
-        if hasattr(arg, "__ms_mutable__"):
-            return False
-        if isinstance(arg, (list, tuple)):
-            return any(_check_constant_tensor_arg(x) for x in arg)
-        return isinstance(arg, Tensor)
-
-    for v in kwargs.values():
-        compile_args += (v,)
-    for arg in compile_args:
-        if not isinstance(arg, tuple) and not isinstance(arg, list):
-            continue
-        if _check_constant_tensor_arg(arg):
-            logger.warning(f"Constant value tensor are detected in tuple or list, which might cause recompiling "
-                           f"when tensor value changes. You can use mutable(Tensor) or mutable(tuple(Tensor)) "
-                           f"to set tensor's value as variable to to avoid recompiling. The tuple or list arg "
-                           f"is: {arg} .")
-            return
-
-
-def _check_recompile(obj, compile_args, kwargs, full_function_name, create_time, echo_function_name):
-    """Warning when the function has been compiled."""
-    ignore_dirs = ["mindspore/ops", "mindspore/nn"]
-    if any((lambda x: x in full_function_name)(x) for x in ignore_dirs):
-        return
-
-    if full_function_name in function_phases:
-        warning_times = 1
-        if len(function_phases[full_function_name]) >= warning_times \
-                and create_time not in function_phases[full_function_name]:
-            if isinstance(obj, ms.nn.Cell):
-                tips = f"Please try to create {echo_function_name} instance only once to avoid recompiling. "
-                logger.info(f"The {echo_function_name} has been compiled again. "
-                            f"{tips} ")
-            else:
-                tips = "Try to decorate the function with @jit(hash_args=...) " \
-                       "or @jit(compile_once=True) to reduce the compile time. " \
-                       "For more details, get instructions about `jit` at " \
-                       "https://www.mindspore.cn/search?inputValue=jit."
-                logger.warning(f"The {echo_function_name} has been compiled again. "
-                               f"{tips} ")
-        else:
-            _check_recompile_args(compile_args, kwargs)
-    else:
-        function_phases[full_function_name] = set()
-    function_phases[full_function_name].add(create_time)
-
 
 def _ms_adapter_tensor_as_parameter_output(data):
     """Check whether the data is an output from a parameter which is a ms_adapter tensor.
     Pylint: disable=unidiomatic-typecheck.
     """
     return ms_adapter_registry.is_registered and isinstance(data, ms_adapter_registry.tensor) \
-        and hasattr(data, "__ms_parameter_output__") and getattr(data, "__ms_parameter_output__")
+        and hasattr(data, "__ms_parameter_output__") and getattr(data, "__ms_parameter_output__")
 
 
 def _convert_python_data(data):
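
Note on the removed `_check_recompile_args` helper above: its warning points users at MindSpore's `mutable` API. A minimal sketch of the suggested fix, assuming a jit-decorated function that takes a tuple of Tensors (`pair_sum` is illustrative, not part of this diff):

    import numpy as np
    from mindspore import Tensor, jit, mutable

    @jit
    def pair_sum(pair):
        return pair[0] + pair[1]

    x = Tensor(np.ones((2, 2), np.float32))
    y = Tensor(np.ones((2, 2), np.float32))
    # A plain (x, y) tuple embeds the Tensor values as graph constants, so a
    # later call with different values can trigger the recompile warned about
    # above; mutable(...) marks the values as variables instead.
    out = pair_sum(mutable((x, y)))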
@@ -137,7 +84,7 @@ def _convert_python_data(data):
     """
     if isinstance(data, (Tensor, PythonTensor)) and data.adapter_flag:
         return ms_adapter_registry.tensor(data)
-    if _ms_adapter_tensor_as_parameter_output(data) and hasattr(data, "tensor"):
+    if _ms_adapter_tensor_as_parameter_output(data):
         return data.tensor
     if isinstance(data, Tensor) and not isinstance(data, PythonTensor):
         return PythonTensor(data, internal=True)
@@ -231,7 +178,6 @@ def _handle_func_args(func, *args, **kwargs):
 
     sys_path = list(sys.path)
     # Get the entry script path.
-    entry_script_path = None
     if sys.argv and sys.argv[0] != '':
         entry_script_path = os.path.realpath(sys.argv[0])
         entry_script_path_dir = os.path.split(entry_script_path)[0]
@@ -401,118 +347,6 @@ def _get_args_for_run_predict(obj, args, kwargs, compile_args):
     return new_args
 
 
-def _is_args_fullmode(args, is_init=True):
-    """Check whether the arguments is for incremental-mode.
-
-    Args:
-        args (Union[list, tuple, dict, Tensor]): Given arguments.
-        is_init (bool): Is check in argument initialization phase.
-
-    Raises:
-        RuntimeError: loss necessary keys and values for incremental-mode.
-
-    Returns:
-        bool: Fullmode or not.
-    """
-    if not isinstance(args, dict):
-        return True
-    if not is_init and (args.get(ARG_SPECIFIED, None) is None or args.get(TOTAL_ARG_LEN, None) is None):
-        raise RuntimeError(
-            "The incremental inputs should be processed(with \"%s\" and \"%s\"), but got %s." %
-            (ARG_SPECIFIED, TOTAL_ARG_LEN, str(args)))
-    return False
-
-
-def _process_dyn_args(fn, dyn_args):
-    """Process the dynamic arguments, return the necessary data for latter processing.
-
-    Args:
-        fn (Function): The root function to compile.
-        dyn_args (Union[dict, list, tuple, None]): Given arguments for dynamic compilation.
-            None for nothing, list or tuple for fullmode setting, dict for incremental configuration.
-
-    Returns:
-        A dict which contains args for dynamic compilation. None for nothing dynamic.
-    """
-    if dyn_args is None:
-        # nothing should be done for None.
-        return dyn_args
-
-    if isinstance(dyn_args, dict) and ARG_SPECIFIED in dyn_args:
-        return dyn_args
-
-    args_sig = inspect.signature(fn)
-    if _is_args_fullmode(dyn_args):
-        if not isinstance(dyn_args, (list, tuple)):
-            temp_dyn_args = (dyn_args,)
-        else:
-            temp_dyn_args = dyn_args
-
-        # If dyn_args is fullmode, it should be apply directly.
-        args_sig_parameters = list(args_sig.parameters.values())
-        if not args_sig_parameters:
-            return ()
-
-        # fn may be Cell's construct while the first input is 'self'.
-        if args_sig_parameters[0].name == "self" and (len(temp_dyn_args) + 1) == len(args_sig_parameters):
-            bound_args = args_sig.bind(None, *temp_dyn_args)
-            bound_args.apply_defaults()
-            return bound_args.args[1:]
-
-        bound_args = args_sig.bind(*temp_dyn_args)
-        bound_args.apply_defaults()
-        return bound_args.args
-
-    # The dyn_args is not fullmode, a real compilation arguments should be assembled by latter procession...
-    arg_names = []
-    args_sig_parameters = list(args_sig.parameters.values())
-    for arg_p in args_sig_parameters:
-        if arg_p.kind in (inspect.Parameter.POSITIONAL_ONLY, inspect.Parameter.POSITIONAL_OR_KEYWORD):
-            arg_names.append(arg_p.name)
-        else:
-            raise TypeError("Dynamic arguments is not accepted for VAR_POSITIONAL or VAR_KEYWORD parameters!")
-
-    offset = -1 if fn.__name__ == 'construct' and args_sig_parameters[0].name == "self" else 0
-    meet_index = set()
-
-    def _check_index_valid(index):
-        if index >= len(arg_names):
-            raise ValueError("For dict mode, valid index is \"0\"-\"%d\", but got %s!" % (len(arg_names) - 1, index))
-        if index in meet_index:
-            raise ValueError("For dict mode, there are more than one same specified key for real index: %d!" % index)
-        meet_index.add(index)
-
-    arg_handler_infos = []
-    for k, v in dyn_args.items():
-        if not isinstance(k, str):
-            raise TypeError("For dict mode, only string key is accepted, but got %s!" % k)
-        if k in arg_names:
-            cur_id = arg_names.index(k)
-            _check_index_valid(cur_id)
-            arg_handler_infos.append([cur_id + offset, v])
-        else:
-            raise ValueError("For dict mode, valid key is %s, but got %s!" % (arg_names, k))
-    return {ARG_SPECIFIED: arg_handler_infos, TOTAL_ARG_LEN: len(args_sig_parameters)}
-
-
-def _generate_dyn_compile_args(compile_args, dyn_args):
-    """Generate the dynamic compile arguments."""
-    if not dyn_args:
-        return compile_args
-    if _is_args_fullmode(dyn_args, False):
-        if not isinstance(dyn_args, (list, tuple)):
-            return (dyn_args,)
-        return dyn_args
-    arg_specified_infos = dyn_args.get(ARG_SPECIFIED, None)
-    if arg_specified_infos is None:
-        raise RuntimeError("For dict mode, a key with \"%s\" should exist, but got %s!" %
-                           (ARG_SPECIFIED, str(dyn_args)))
-    new_compile_args = list(compile_args)
-    for index, arg in arg_specified_infos:
-        new_compile_args[index] = arg
-    return tuple(new_compile_args)
-
-
 class _MindsporeFunctionExecutor:
     """
     Represents a function compiled by graph compiler.
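
For context on the block removed above: `_is_args_fullmode`, `_process_dyn_args` and `_generate_dyn_compile_args` implemented the dict-based "incremental mode" of `input_signature` that the 2.3.0 `jit` docstring documents (the rc2 side keeps only the full mode). A hedged sketch of what that mode looked like under 2.3.0, mirroring the docstring example removed further down:

    import numpy as np
    import mindspore as ms
    from mindspore import Tensor, jit

    # Dict mode: only `y` is declared dynamic; the other inputs keep their
    # call-time shapes. Accepted by 2.3.0 per this file's docstring.
    @jit(input_signature={"y": Tensor(shape=[None, 3], dtype=ms.float32)})
    def tensor_add(x, y):
        return x + y

    out = tensor_add(Tensor(np.ones((2, 3), np.float32)),
                     Tensor(np.ones((2, 3), np.float32)))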
@@ -530,7 +364,6 @@ class _MindsporeFunctionExecutor:
     Returns:
         The result of pipeline running in graph mode.
     """
-
     def __init__(self, fn, ms_create_time, input_signature=None, obj=None, jit_config=None):
         init_pipeline()
         if not isinstance(fn, (types.FunctionType, types.MethodType)):
@@ -548,6 +381,7 @@ class _MindsporeFunctionExecutor:
         self._compile_args = None
         self.jit_config_dict = jit_config.jit_config_dict if jit_config else None
 
+
     @_wrap_func
     def __call__(self, *args, **kwargs):
         args_list = args
@@ -575,6 +409,7 @@
 
         return output
 
+
     def compile(self, method_name, *args, **kwargs):
         """Returns pipeline for the given args."""
         # Check whether hook function registered on Cell object.
@@ -635,7 +470,7 @@
             self._graph_executor.clear_compile_arguments_resource()
             return phase
 
-        _check_recompile(self.obj, compile_args, kwargs, full_function_name, create_time, echo_function_name)
+        self._check_recompile(full_function_name, create_time, echo_function_name)
 
         # If enable compile cache, get the dependency files list and set to graph executor.
         self._set_compile_cache_dep_files()
@@ -667,6 +502,31 @@
 
         return phase
 
+
+    @staticmethod
+    def _check_recompile(full_function_name, create_time, echo_function_name):
+        """Warning when the function has been compiled."""
+        ignore_dirs = ["mindspore/ops", "mindspore/nn"]
+        if any((lambda x: x in full_function_name)(x) for x in ignore_dirs):
+            return
+
+        if full_function_name in function_phases:
+            warning_times = 1
+            if len(function_phases[full_function_name]) >= warning_times \
+                    and create_time not in function_phases[full_function_name]:
+                tips = "Try to decorate the function with @jit(hash_args=...) " \
+                       "or @jit(compile_once=True) to reduce the compile time. " \
+                       "For more details, get instructions about `jit` at " \
+                       "https://www.mindspore.cn/search?inputValue=jit."
+
+                logger.warning(f"The {echo_function_name} has been compiled again. "
+                               f"{tips} ")
+        else:
+            function_phases[full_function_name] = set()
+
+        function_phases[full_function_name].add(create_time)
+
+
     @staticmethod
     def _optimizer_state_init(opt_states):
         """set data for all optimizer states in case it is executed in graph mode"""
@@ -677,6 +537,7 @@ class _MindsporeFunctionExecutor:
             if opt_param.has_init and (prefix in prefix_list or opt_param.name == "global_step"):
                 opt_param.init_data()
 
+
     def _get_key_id(self):
         """get key id."""
         if isinstance(self.obj, ms.nn.Cell):
@@ -688,6 +549,7 @@
             key_id = key_id + ".grad"
         return key_id
 
+
    def _get_generate_name(self):
        """get generate name."""
        generate_name = self.fn.__module__ + "." + self.fn.__name__ + "." + self.fn.__code__.co_filename + "." + str(
@@ -700,47 +562,56 @@
             generate_name = generate_name[:generate_name.rfind(str(id(self.fn)))] + str(id(self.shard_parent_obj))
         return generate_name, echo_function_name
 
+
     def _set_compile_cache_dep_files(self):
         # If enable compile cache, get the dependency files list
         enable_compile_cache = context.get_context("enable_compile_cache")
-        if enable_compile_cache is None:
+        if enable_compile_cache is not True and enable_compile_cache != "1":
             enable_compile_cache = os.getenv('MS_COMPILER_CACHE_ENABLE')
         if enable_compile_cache is True or enable_compile_cache == "1":
             self._graph_executor.set_compile_cache_dep_files(_get_compile_cache_dep_files())
 
+
     def _generate_compile_args(self, args_list):
         """Chose dynamic shape tensors or actual input tensors as compile args."""
         # Case: If the shape of input args is dynamic, get dynamic shape tensor from context and use it to compile.
         compile_args = _pynative_executor.get_dynamic_input(args_list)
         # Case: The `set_inputs()` of Cell object has been set, using these dynamic shape args as compile args.
         if self.fn.__name__ == 'construct' and isinstance(self.obj, ms.nn.Cell) and self.obj.get_inputs():
-            compile_args = _generate_dyn_compile_args(args_list, self.obj.get_inputs())
+            compile_args = self.obj.get_inputs()
             if len(compile_args) != len(args_list):
                 raise ValueError(f"The number of actual input tensors: {len(args_list)} is not equal to the number of "
                                  f"dynamic shape tensors: {len(compile_args)}.")
-            self._graph_executor.check_argument_consistency(compile_args, args_list, "input_signature")
+            for i, elem in enumerate(compile_args):
+                if isinstance(elem, PythonTensor):
+                    Validator.check_dynamic_shape(compile_args[i], args_list[i], i)
             Validator.check_symbolic_shape(compile_args, args_list)
 
         # Case: If dynamic shape tensors have been assigned to `input_signature`, they are preferred as compile args.
         if self.input_signature is not None:
-            compile_args = list(_generate_dyn_compile_args(args_list, self.input_signature))
-            dyn_shape = any([is_shape_unknown(elem.shape) for elem in compile_args if isinstance(elem, PythonTensor)])
+            if not isinstance(self.input_signature, (tuple, list)):
+                self.input_signature = (self.input_signature,)
+            self.input_signature = list(self.input_signature)
+            dyn_shape = False
+            for i, elem in enumerate(self.input_signature):
+                if isinstance(elem, PythonTensor) and is_shape_unknown(elem.shape):
+                    Validator.check_dynamic_shape(self.input_signature[i], args_list[i], i)
+                    dyn_shape = True
             Validator.check_symbolic_shape(self.input_signature, args_list)
            if dyn_shape:
                # Checkout whether the `sens` has been added to args_list.
-                if len(compile_args) == len(args_list) - 1:
+                if len(self.input_signature) == len(args_list) - 1:
                    logger.warning(f"The number of actual input args '{len(args_list)}' is one more than the number "
-                                   f"of input_signature args '{len(compile_args)}'. The last actual args may "
+                                   f"of input_signature args '{len(self.input_signature)}'. The last actual args may "
                                   f"be 'sens' and added it to compile args.")
-                    compile_args.append(args_list[-1])
-                compile_args = tuple(compile_args)
-                self._graph_executor.check_argument_consistency(compile_args, args_list, "input_signature")
+                    self.input_signature.append(args_list[-1])
+                compile_args = tuple(self.input_signature)
                if self.obj is not None:
                    _pynative_executor.set_dynamic_input(self.obj, *compile_args)
                else:
                    _pynative_executor.set_dynamic_input(self.fn, *compile_args)
            else:
-                if not verify_inputs_signature(compile_args, args_list):
+                if not verify_inputs_signature(self.input_signature, args_list):
                    raise ValueError("The input args is incompatible with the args in `input_signature`!")
        return compile_args
 
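`_generate_compile_args` above prefers dynamic-shape Tensors registered through a Cell's `set_inputs()`. A brief sketch of that path (the `Net` class is illustrative):

    import numpy as np
    import mindspore as ms
    from mindspore import Tensor, nn

    class Net(nn.Cell):
        def construct(self, x):
            return x * 2

    net = Net()
    # Register a dynamic-shape placeholder; _generate_compile_args picks it
    # up via self.obj.get_inputs() and checks it against the actual inputs.
    net.set_inputs(Tensor(shape=[None, 3], dtype=ms.float32))
    out = net(Tensor(np.ones((4, 3), np.float32)))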
@@ -783,18 +654,6 @@ def _get_jit_hash(hash_input):
     return _get_obj_id(hash_input)
 
 
-def _update_graph_executor_config(jit_config):
-    """Update GraphExecutor jit_config"""
-    if isinstance(jit_config, JitConfig):
-        jit_config = jit_config.jit_config_dict
-    if not isinstance(jit_config, dict):
-        return
-    valid_config = dict()
-    for k, v in jit_config.items():
-        valid_config[str(k)] = str(v)
-    GraphExecutor_.get_instance().set_jit_config(JitConfig(**valid_config).jit_config_dict)
-
-
 def jit(fn=None, mode="PSJit", input_signature=None, hash_args=None, jit_config=None, compile_once=False):
     """
     Create a callable MindSpore graph from a Python function.
@@ -805,23 +664,13 @@ def jit(fn=None, mode="PSJit", input_signature=None, hash_args=None, jit_config=
         fn (Function): The Python function that will be run as a graph. Default: ``None`` .
         mode (str): The type of jit used, the value of mode should be ``PIJit`` or ``PSJit``. Default: ``PSJit`` .
 
-            - `PSJit <https://www.mindspore.cn/docs/en/master/note/static_graph_syntax_support.html>`_ :
-              Parse python ast to build graph.
-            - `PIJit <https://www.mindspore.cn/docs/en/master/design/dynamic_graph_and_static_graph.html>`_ :
-              Parse python bytecode to build graph at runtime.
-
-        input_signature (Union[Tuple, List, Dict, Tensor]): The Tensor which describes the input arguments. The
-            shape and dtype of the Tensor will be supplied to this function. If `input_signature` is specified, the
-            input parameters of `fn` cannot accept `**kwargs`, and the shape and dtype of actual inputs should keep the
-            same as `input_signature`. Otherwise, TypeError will be raised. There are two mode for `input_signature`:
-
-            - Full mode: Arguments is a Tuple, List or a Tensor, and they will be used as all compile inputs
-              for graph-compiling.
-            - Incremental mode: Argument is a Dict, and they will set to some of the graph inputs, which will be
-              substituted into the input at the corresponding position for graph-compiling.
-
-            Default: ``None`` .
+            - `PSJit <https://www.mindspore.cn/docs/en/master/note/static_graph_syntax_support.html>`_ : MindSpore GRAPH_MODE.
+            - `PIJit <https://www.mindspore.cn/docs/en/master/design/dynamic_graph_and_static_graph.html>`_ : MindSpore PYNATIVE_MODE.
 
+        input_signature (Tensor): The Tensor which describes the input arguments. The shape and dtype of the Tensor
+            will be supplied to this function. If input_signature is specified, each input to `fn` must be a `Tensor`.
+            And the input parameters of `fn` cannot accept `**kwargs`. The shape and dtype of actual inputs should
+            keep the same as input_signature. Otherwise, TypeError will be raised. Default: ``None`` .
         hash_args (Union[Object, List or Tuple of Objects]): The local free variables used inside `fn`,
             like functions or objects of class defined outside `fn`. Calling `fn` again with change of `hash_args`
             will trigger recompilation. Default: ``None`` .
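
Both sides of the docstring hunk above describe the same two capture strategies for `mode`. A minimal usage sketch against this file's `jit` signature (behavior as summarized in the docstring; the function bodies are illustrative):

    from mindspore import jit

    @jit(mode="PSJit")   # graph built by parsing the Python AST (static-graph style)
    def double_ps(x):
        return x * 2

    @jit(mode="PIJit")   # graph captured from Python bytecode at runtime
    def double_pi(x):
        return x * 2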
@@ -877,13 +726,6 @@ def jit(fn=None, mode="PSJit", input_signature=None, hash_args=None, jit_config=
         ...
         >>> out = tensor_add_with_sig(x, y)
         ...
-        >>> @jit(input_signature={"y": Tensor(np.ones([1, 1, 3, 3]).astype(np.float32))})
-        ... def tensor_add_with_sig_1(x, y):
-        ...     z = x + y
-        ...     return z
-        ...
-        >>> out1 = tensor_add_with_sig_1(x, y)
-        ...
         ... # Set hash_args as fn, otherwise cache of compiled closure_fn will not be reused.
         ... # While fn differs during calling again, recompilation will be triggered.
         >>> def func(x):
@@ -923,8 +765,6 @@ def jit(fn=None, mode="PSJit", input_signature=None, hash_args=None, jit_config=
     else:
         hash_obj = int(time.time() * 1e9)
 
-    dyn_args = _process_dyn_args(func, input_signature)
-
     @wraps(func)
     def staging_specialize(*args, **kwargs):
         if os.getenv("MS_JIT") == '0':
@@ -938,7 +778,7 @@ def jit(fn=None, mode="PSJit", input_signature=None, hash_args=None, jit_config=
         # only the function or cell instance wrapped by shard will fall into this branch
         if _is_pynative_parallel() and func.__name__ == _PYNATIVE_PARALLEL_FUNC_NAME:
             process_obj = hash_args
-        out = _MindsporeFunctionExecutor(func, hash_obj, dyn_args, process_obj, jit_config)(*args, **kwargs)
+        out = _MindsporeFunctionExecutor(func, hash_obj, input_signature, process_obj, jit_config)(*args, **kwargs)
         return out
 
     return staging_specialize
@@ -961,7 +801,6 @@ def jit(fn=None, mode="PSJit", input_signature=None, hash_args=None, jit_config=
     if func.__code__.co_flags & UNSUPPORTED_CODE_TYPE:
         return decorated
 
-    _update_graph_executor_config(jit_config)
     config = dict()
     if isinstance(jit_config, JitConfig):
         config.update(jit_config.jit_config_dict)
@@ -969,7 +808,7 @@ def jit(fn=None, mode="PSJit", input_signature=None, hash_args=None, jit_config=
         config.update(jit_config)
     jit_mode_pi_enable()
 
-    if jit_mode_pi_compile(func, config, input_signature) is False:
+    if jit_mode_pi_compile(func, config) is False:
         logger.warning('add fn {} to compile failed '.format(func))
 
     return decorated
@@ -1549,18 +1388,6 @@ class _PyNativeExecutor:
         """
         self._executor.set_grad_flag(flag)
 
-    def set_async_for_graph(self, flag):
-        """
-        Set the flag for graph async run.
-
-        Args:
-            flag (bool): Specifying whether enable graph async run.
-
-        Return:
-            None.
-        """
-        self._executor.set_async_for_graph(flag)
-
     def enable_grad(self):
         """
         The global flag whether needing to calculate gradient.
@@ -1754,7 +1581,7 @@ class _CellGraphExecutor:
  def _set_compile_cache_dep_files(self, phase):
      # If enable compile cache, get the dependency files list
      enable_compile_cache = context.get_context("enable_compile_cache")
-     if enable_compile_cache is None:
+     if enable_compile_cache is not True and enable_compile_cache != "1":
          enable_compile_cache = os.getenv('MS_COMPILER_CACHE_ENABLE')
      if enable_compile_cache is True or enable_compile_cache == "1":
          self._graph_executor.set_compile_cache_dep_files(_get_compile_cache_dep_files())
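The fallback above means the compile cache can be switched on either through the context flag or through the environment variable it consults; a sketch assuming the documented `enable_compile_cache` / `compile_cache_path` context keys and the `MS_COMPILER_CACHE_ENABLE` variable:

    import os
    import mindspore as ms

    # Option 1: context flag (checked first by _set_compile_cache_dep_files).
    ms.set_context(enable_compile_cache=True, compile_cache_path="./graph_cache")

    # Option 2: environment variable, consulted when the flag is not set.
    os.environ['MS_COMPILER_CACHE_ENABLE'] = '1'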
@@ -1793,7 +1620,7 @@ class _CellGraphExecutor:
  phase = phase + '.' + str(obj.create_time) + '.' + str(id(obj)) + '.' + obj.arguments_key
  obj.phase_cache[raw_phase] = phase
  update_auto_dynamic_shape_phase(args, key_id, phase)
- obj.current_phase = phase
+
  if phase in obj.compile_cache and self.has_compiled(phase):
      logger.debug("%r graph has existed.", phase)
      # Release resource should be released when CompileInner won't be executed, such as cur_convert_input_
@@ -1801,10 +1628,6 @@ class _CellGraphExecutor:
      self._graph_executor.clear_compile_arguments_resource()
      return phase, False

- full_function_name = obj.__class__.__name__ + '.' + str(obj.instance_count) + '.' + str(id(type(obj)))
- echo_function_name = obj.__class__.__name__
- _check_recompile(obj, args, kwargs, full_function_name, obj.create_time, echo_function_name)
-
  obj.check_names()
  _check_full_batch()
  self._set_dataset_mode(obj)
@@ -1832,6 +1655,7 @@ class _CellGraphExecutor:
  elif 'skip_auto_parallel_compile' not in obj.get_flags().keys():
      obj.parameter_layout_dict = self._graph_executor.get_parameter_layout(phase)
      obj.parallel_parameter_name_list = self._graph_executor.get_parallel_parameter_name_list(phase)
+
  if "export.air" in phase:
      self._build_data_graph(obj, phase)
  elif BROADCAST_PHASE not in phase and _get_parameter_broadcast():
@@ -1871,18 +1695,6 @@ class _CellGraphExecutor:
      """
      return self._graph_executor.has_compiled(phase)

- def flops_collection(self, phase='train'):
-     """
-     Specify whether have been compiled.
-
-     Args:
-         phase (str): The phase name. Default: 'predict'.
-
-     Returns:
-         bool, specifies whether the specific graph has been compiled.
-     """
-     return self._graph_executor.flops_collection(phase)
-
  @_wrap_func
  def _exec_pip(self, obj, *args, phase=''):
      """Execute the generated pipeline."""
@@ -1915,14 +1727,6 @@ class _CellGraphExecutor:
      """Clear the memory resource of a network."""
      self._graph_executor.del_net_res(obj, net_id)

- def inc_graph_cell_count(self):
-     """Increase the count of GraphCell instance."""
-     self._graph_executor.inc_graph_cell_count()
-
- def dec_graph_cell_count(self):
-     """Decrease the count of GraphCell instance."""
-     self._graph_executor.dec_graph_cell_count()
-
  def _get_branch_control_input(self):
      if ('obf_ratio' not in self.obfuscate_config.keys()) or (
              'obf_random_seed' not in self.obfuscate_config.keys()):
@@ -2029,21 +1833,7 @@ def _bind_device_context():
  _bind_device_ctx()


- def flops_collection(phase='train'):
-     """
-     Recycle memory used by MindSpore.
-     When train multi Neural network models in one process, memory used by MindSpore is very large,
-     this is because MindSpore cached runtime memory for every model.
-     To recycle these cached memory, users can call this function after training of one model.
-
-     Examples:
-         >>> import mindspore as ms
-         >>> ms.ms_memory_recycle()
-     """
-     return _cell_graph_executor.flops_collection(phase)
-
-
  _cell_graph_executor = _CellGraphExecutor()
  _pynative_executor = _PyNativeExecutor()

- __all__ = ['ms_function', 'ms_memory_recycle', 'ms_class', 'jit', 'jit_class', 'flops_collection']
+ __all__ = ['ms_function', 'ms_memory_recycle', 'ms_class', 'jit', 'jit_class']
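The removed `flops_collection` wrapper above had inherited `ms_memory_recycle`'s docstring verbatim; that export remains in `__all__` on both sides, and its documented use is simply:

    import mindspore as ms

    # Release runtime memory cached for previously compiled models.
    ms.ms_memory_recycle()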
mindspore/common/dtype.py CHANGED
@@ -22,9 +22,6 @@ from inspect import isfunction
  import numpy as np
  from mindspore._c_expression import typing
  from mindspore._c_expression.typing import Type
- from mindspore._c_expression.np_dtypes import np_version_valid
- if np_version_valid(False):
-     from mindspore._c_expression.np_dtypes import bfloat16 as np_bfloat16

  __dtype__ = [
      "int8", "byte",
@@ -263,7 +260,8 @@ def dtype_to_nptype(type_):
  >>> ms.dtype_to_nptype(ms.int8)
  <class 'numpy.int8'>
  """
- _dtype_nptype_dict = {
+
+ return {
      bool_: np.bool_,
      int8: np.int8,
      int16: np.int16,
@@ -278,10 +276,8 @@ def dtype_to_nptype(type_):
      float64: np.float64,
      complex64: np.complex64,
      complex128: np.complex128,
- }
- if np_version_valid(False):
-     _dtype_nptype_dict.update({bfloat16: np_bfloat16})
- return _dtype_nptype_dict[type_]
+     bfloat16: np.float32,
+ }[type_]


  def dtype_to_pytype(type_):
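A usage sketch for `dtype_to_nptype`; note the behavioral split the hunk records: one side maps `bfloat16` to a dedicated numpy extension type when available, the other silently falls back to `np.float32`:

    import mindspore as ms

    ms.dtype_to_nptype(ms.int8)      # <class 'numpy.int8'>
    ms.dtype_to_nptype(ms.bfloat16)  # np_bfloat16 or np.float32, depending on
                                     # which side of this diff is installed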
mindspore/common/dump.py CHANGED
@@ -25,14 +25,17 @@ def set_dump(target, enabled=True):
  Enable or disable dump for the `target` and its contents.

  `target` should be an instance of :class:`mindspore.nn.Cell` or :class:`mindspore.ops.Primitive` .
- Please note that this API takes effect only when Synchronous Dump is enabled and the `dump_mode`
+ Please note that this API takes effect only when Asynchronous Dump is enabled and the `dump_mode`
  field in dump config file is ``"2"`` . See the `dump document
  <https://www.mindspore.cn/tutorials/experts/en/master/debug/dump.html>`_ for details.
  The default enabled status for
  a :class:`mindspore.nn.Cell` or :class:`mindspore.ops.Primitive` is False.

+ .. warning::
+     This is an experimental API that is subject to change or deletion. It is not supported for 2.3 version.
+
  Note:
-     1. This API is only effective for GRAPH_MODE whose graph compilation level is O0/O1 with Ascend backend.
+     1. This API is only effective for GRAPH_MODE with Ascend backend.
      2. This API only supports being called before training starts.
         If you call this API during training, it may not be effective.
      3. After using `set_dump(Cell, True)` , operators in forward and backward
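A minimal sketch of calling `set_dump` before training, assuming an Ascend device and a dump config file with `dump_mode` set to ``"2"`` already in place as the docstring requires:

    import mindspore as ms
    from mindspore import nn, set_dump

    ms.set_context(mode=ms.GRAPH_MODE, device_target="Ascend")
    net = nn.Dense(3, 3)
    set_dump(net, enabled=True)  # dump only this cell's operators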
mindspore/common/jit_config.py CHANGED
@@ -14,6 +14,7 @@
  # ============================================================================
  """JitConfig for compile."""

+
  class JitConfig:
      """
      Jit config for compile.
@@ -21,7 +22,6 @@ class JitConfig:
  Args:
      jit_level (str, optional): Used to control the compilation optimization level.
          Supports ["O0", "O1", "O2"]. Default: ``""`` , The framework automatically selects the execution method.
-         Not recommended, it is recommended to use the jit decorator.

      - ``"O0"``: Except for optimizations that may affect functionality, all other optimizations are turned off,
        adopt KernelByKernel execution mode.
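A hedged sketch of passing the `jit_level` documented above through `JitConfig`; attaching it via `Cell.set_jit_config` is one common route:

    from mindspore import JitConfig, nn

    net = nn.Dense(3, 3)
    net.set_jit_config(JitConfig(jit_level="O1"))  # select the O1 optimization level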
mindspore/common/lazy_inline.py CHANGED
@@ -20,25 +20,19 @@ from functools import wraps
  from mindspore import log as logger


- def lazy_inline(fn=None, attrs=None, policy=None):
+ def lazy_inline(fn=None, attrs=None):
      """
-     Make the cell to be reusable. The corresponding sub graph will not be inline at first
-     and will be inline with the policy.
+     Make the cell to be reusable. The corresponding sub graph will not be inline at first.
      Registering the decorator of the built-in function `__init__` of a cell, the decorator
      will add the parameters of `__init__` according to the `attrs` as the attributes of this cell.

      .. warning::
          This feature is only supported on Ascend and is not supported on other hardwares.
          The construct parameters must be positional or key word arguments and have not default values.
-         The cell has not switch sub graph.

      Args:
          fn (function): `__init__` function of a cell.
          attrs (Union[list[string], string]): The attributes list to add for the cell.
-         policy (Union[None, "front"]): The policy of inline. Default is None.
-
-             - ``None``: The cell will be compiled to sub graph and will not be inline.
-             - ``"front"``: The cell will be compiled to sub graph first and will be inline at front end.

      Returns:
          function, original function.
@@ -210,12 +204,6 @@ def lazy_inline(fn=None, attrs=None, policy=None):
          del new_args['self']
          new_args = new_args.values()
          fn(self, *args, **kwargs)
-
-         if isinstance(policy, str) and policy == "front":
-             self.no_inline = False
-         elif policy is not None:
-             raise ValueError(f"policy must be None or 'front'")
-
          if attrs is None:
              self.cell_init_args = "lazy_inline_" + type(self).__name__ + str(new_args)
              return
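A minimal sketch of applying `lazy_inline` to a cell's `__init__` (valid on both sides of this diff, since it uses neither `attrs` nor `policy`); the cell layout is illustrative:

    from mindspore import lazy_inline, nn

    class Block(nn.Cell):
        @lazy_inline
        def __init__(self, hidden):   # positional arg, no default, per the warning above
            super().__init__()
            self.fc = nn.Dense(hidden, hidden)

        def construct(self, x):
            return self.fc(x)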