mindspore 2.3.0__cp39-none-any.whl → 2.3.0rc2__cp39-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of mindspore might be problematic.

Files changed (423)
  1. mindspore/.commit_id +1 -1
  2. mindspore/Third_Party_Open_Source_Software_Notice +0 -1512
  3. mindspore/__init__.py +1 -2
  4. mindspore/_c_dataengine.cpython-39-aarch64-linux-gnu.so +0 -0
  5. mindspore/_c_expression.cpython-39-aarch64-linux-gnu.so +0 -0
  6. mindspore/_c_mindrecord.cpython-39-aarch64-linux-gnu.so +0 -0
  7. mindspore/_checkparam.py +25 -5
  8. mindspore/_extends/graph_kernel/model/graph_parallel.py +1 -1
  9. mindspore/_extends/parse/__init__.py +2 -2
  10. mindspore/_extends/parse/compile_config.py +0 -29
  11. mindspore/_extends/parse/namespace.py +2 -2
  12. mindspore/_extends/parse/parser.py +5 -21
  13. mindspore/_extends/parse/resources.py +7 -5
  14. mindspore/_extends/parse/standard_method.py +59 -40
  15. mindspore/_mindspore_offline_debug.cpython-39-aarch64-linux-gnu.so +0 -0
  16. mindspore/amp.py +5 -26
  17. mindspore/bin/cache_admin +0 -0
  18. mindspore/bin/cache_server +0 -0
  19. mindspore/boost/adasum.py +1 -1
  20. mindspore/boost/base.py +1 -1
  21. mindspore/boost/boost_cell_wrapper.py +1 -1
  22. mindspore/boost/grad_freeze.py +2 -2
  23. mindspore/boost/less_batch_normalization.py +6 -9
  24. mindspore/common/__init__.py +1 -8
  25. mindspore/common/_register_for_tensor.py +9 -8
  26. mindspore/common/api.py +65 -275
  27. mindspore/common/dtype.py +4 -8
  28. mindspore/common/dump.py +5 -2
  29. mindspore/common/jit_config.py +1 -1
  30. mindspore/common/lazy_inline.py +2 -14
  31. mindspore/common/parameter.py +15 -14
  32. mindspore/common/recompute.py +5 -20
  33. mindspore/common/sparse_tensor.py +6 -21
  34. mindspore/common/tensor.py +52 -100
  35. mindspore/communication/__init__.py +11 -6
  36. mindspore/communication/management.py +94 -92
  37. mindspore/context.py +18 -180
  38. mindspore/dataset/engine/datasets.py +46 -69
  39. mindspore/dataset/engine/datasets_user_defined.py +53 -72
  40. mindspore/dataset/engine/datasets_vision.py +2 -2
  41. mindspore/dataset/engine/queue.py +38 -56
  42. mindspore/dataset/engine/validators.py +5 -11
  43. mindspore/dataset/vision/__init__.py +5 -5
  44. mindspore/dataset/vision/c_transforms.py +5 -5
  45. mindspore/dataset/vision/py_transforms_util.py +1 -1
  46. mindspore/dataset/vision/transforms.py +46 -591
  47. mindspore/dataset/vision/utils.py +1 -121
  48. mindspore/dataset/vision/validators.py +3 -9
  49. mindspore/hal/__init__.py +1 -7
  50. mindspore/hal/device.py +1 -1
  51. mindspore/include/api/model.h +0 -3
  52. mindspore/include/dataset/vision.h +2 -54
  53. mindspore/include/mindapi/base/types.h +0 -1
  54. mindspore/lib/libdnnl.so.2 +0 -0
  55. mindspore/lib/libmindspore.so +0 -0
  56. mindspore/lib/libmindspore_backend.so +0 -0
  57. mindspore/lib/libmindspore_common.so +0 -0
  58. mindspore/lib/libmindspore_core.so +0 -0
  59. mindspore/lib/libmindspore_glog.so.0 +0 -0
  60. mindspore/lib/libmindspore_gpr.so.15 +0 -0
  61. mindspore/lib/libmindspore_grpc++.so.1 +0 -0
  62. mindspore/lib/libmindspore_grpc.so.15 +0 -0
  63. mindspore/lib/libmindspore_shared_lib.so +0 -0
  64. mindspore/lib/libmpi_adapter.so +0 -0
  65. mindspore/lib/libmpi_collective.so +0 -0
  66. mindspore/lib/libnnacl.so +0 -0
  67. mindspore/lib/libopencv_core.so.4.5 +0 -0
  68. mindspore/lib/libps_cache.so +0 -0
  69. mindspore/lib/plugin/ascend/custom_aicore_ops/op_impl/ai_core/tbe/config/ascend310p/aic-ascend310p-ops-info.json +0 -35
  70. mindspore/lib/plugin/ascend/custom_aicore_ops/op_impl/ai_core/tbe/custom_aicore_ops_impl/kv_cache_mgr.py +0 -2
  71. mindspore/lib/plugin/ascend/custom_aicore_ops/op_impl/vector_core/tbe/custom_aicore_ops_impl/kv_cache_mgr.py +0 -2
  72. mindspore/lib/plugin/ascend/custom_aicpu_ops/op_impl/cpu/aicpu_kernel/impl/libcust_cpu_kernels.so +0 -0
  73. mindspore/lib/plugin/ascend/custom_aicpu_ops/op_impl/cpu/config/cust_aicpu_kernel.json +0 -72
  74. mindspore/lib/plugin/ascend/custom_aicpu_ops/op_proto/libcust_op_proto.so +0 -0
  75. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_api/include/{aclnn_all_finite.h → aclnn_add_custom.h} +11 -9
  76. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_api/include/aclnn_decoder_kv_cache.h +1 -1
  77. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_api/include/aclnn_prompt_kv_cache.h +1 -1
  78. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_api/lib/libcust_opapi.so +0 -0
  79. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/config/ascend310p/aic-ascend310p-ops-info.json +12 -184
  80. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/config/ascend910/aic-ascend910-ops-info.json +15 -7
  81. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/config/ascend910b/aic-ascend910b-ops-info.json +15 -7
  82. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl/dynamic/add_custom.cpp +81 -0
  83. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl/dynamic/add_custom.py +134 -0
  84. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl/dynamic/decoder_kv_cache.py +31 -77
  85. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl/dynamic/prompt_kv_cache.py +31 -77
  86. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/op_tiling/lib/linux/aarch64/libcust_opmaster_rt2.0.so +0 -0
  87. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/op_tiling/liboptiling.so +0 -0
  88. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_proto/inc/op_proto.h +5 -4
  89. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_proto/lib/linux/aarch64/libcust_opsproto_rt2.0.so +0 -0
  90. mindspore/lib/plugin/ascend/libascend_collective.so +0 -0
  91. mindspore/lib/plugin/ascend/libdvpp_utils.so +0 -0
  92. mindspore/lib/plugin/ascend/libhccl_plugin.so +0 -0
  93. mindspore/lib/plugin/ascend/liblowlatency_collective.so +0 -0
  94. mindspore/lib/plugin/ascend/libmindspore_cpu_kernels.so +0 -0
  95. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/bin/DeviceBin +0 -0
  96. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/bin/PkgInspect +0 -0
  97. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/bin/op_man +0 -0
  98. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/device/ascend910b/bin/ascend910b.bin +286 -275
  99. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/host/libasdops_cann_host.so +0 -0
  100. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/host/libasdops_host.so +0 -0
  101. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/lib/libasdops.so +0 -0
  102. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/lib/libasdops_static.a +0 -0
  103. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/add/add_impl.h +0 -1
  104. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/apply_rotary_pos_emb_impl.h +0 -1
  105. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/asdop/asd_op_impl.h +0 -3
  106. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/backend_param.h +0 -5
  107. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/cast/cast_tiling.h +45 -1
  108. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/compare/compare_impl.h +0 -1
  109. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/flash_attention_score/flash_attention_score_impl.h +4 -8
  110. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/flash_attention_score/flash_attention_score_tiling.h +4 -11
  111. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/flash_attention_score/kernel/flash_attention_score_mix_hwsync.h +0 -18
  112. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/internal_kernel.h +0 -6
  113. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/internal_rtbackend.h +75 -1
  114. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul/kernel/matmul.h +5 -5
  115. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul/matmul_impl.h +3 -18
  116. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul_common/pp_matmul_common_tiling.h +5 -5
  117. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul_common/pp_matmul_info.h +2 -2
  118. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul_common/tiling_data.h +3 -36
  119. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul_stridedslice/kernel/matmul_stridedslice_fusion.h +2 -2
  120. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul_stridedslice/matmul_stridedslice_fusion_impl.h +4 -22
  121. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/op_param.h +2 -16
  122. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/paged_attention/kernel/paged_attention_mix_hwsync.h +3 -1
  123. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/paged_attention/paged_attention_impl.h +4 -5
  124. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/paged_attention/paged_attention_tiling.h +4 -9
  125. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/attention_param.h +2 -5
  126. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/matmul_ext_param.h +0 -1
  127. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/matmul_qkv_param.h +4 -10
  128. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/sub_param.h +12 -0
  129. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/rms_norm/rms_norm_impl.h +0 -1
  130. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/sub/sub_impl.h +0 -1
  131. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/tune_repo/matmul_table.h +1 -1
  132. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/backend.h +2 -10
  133. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/elewise_utils.h +1 -5
  134. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log.h +0 -1
  135. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log_tiling.h +0 -17
  136. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/math.h +7 -2
  137. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libAdd_impl.so +0 -0
  138. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libSub_impl.so +0 -0
  139. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libadd_layernorm_impl.so +0 -0
  140. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libadd_rms_norm_impl.so +0 -0
  141. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libapply_rotary_pos_emb_impl.so +0 -0
  142. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libcast_impl.so +0 -0
  143. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libgelu_impl.so +0 -0
  144. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libmatmul_impl.so +0 -0
  145. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libmatmul_stridedslice_fusion_impl.so +0 -0
  146. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libms_kernels_internal.so +0 -0
  147. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libnot_equal_impl.so +0 -0
  148. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libreshape_and_cache_impl.so +0 -0
  149. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/librms_norm_impl.so +0 -0
  150. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_bf16_bnsd_full_mix.o +0 -0
  151. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_bf16_bnsd_tri_mix.o +0 -0
  152. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_bf16_bsh_full_mix.o +0 -0
  153. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_bf16_bsh_tri_mix.o +0 -0
  154. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_fp16_bnsd_full_mix.o +0 -0
  155. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_fp16_bnsd_tri_mix.o +0 -0
  156. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_fp16_bsh_full_mix.o +0 -0
  157. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_fp16_bsh_tri_mix.o +0 -0
  158. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/paged_attention_bf16_bnsd_full_mix.o +0 -0
  159. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/paged_attention_bf16_bsh_full_mix.o +0 -0
  160. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/paged_attention_fp16_bnsd_full_mix.o +0 -0
  161. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/paged_attention_fp16_bsh_full_mix.o +0 -0
  162. mindspore/lib/plugin/ascend/ms_kernels_internal/lccl/lib/liblcal.so +0 -0
  163. mindspore/lib/plugin/ascend/ms_kernels_internal/lccl/lib/liblccl_wrapper.so +0 -0
  164. mindspore/lib/plugin/libmindspore_ascend.so.2 +0 -0
  165. mindspore/mindrecord/filewriter.py +2 -2
  166. mindspore/mint/__init__.py +40 -720
  167. mindspore/mint/nn/__init__.py +7 -89
  168. mindspore/mint/nn/functional.py +16 -165
  169. mindspore/mint/optim/adamw.py +16 -15
  170. mindspore/nn/__init__.py +2 -0
  171. mindspore/nn/cell.py +98 -97
  172. mindspore/nn/extend/basic.py +2 -2
  173. mindspore/nn/extend/embedding.py +1 -1
  174. mindspore/nn/extend/layer/normalization.py +5 -7
  175. mindspore/nn/generator.py +297 -0
  176. mindspore/nn/layer/activation.py +3 -4
  177. mindspore/nn/layer/basic.py +16 -79
  178. mindspore/nn/layer/conv.py +8 -17
  179. mindspore/nn/layer/embedding.py +4 -1
  180. mindspore/nn/layer/math.py +1 -1
  181. mindspore/nn/layer/normalization.py +1 -1
  182. mindspore/nn/layer/pooling.py +0 -5
  183. mindspore/nn/layer/rnn_cells.py +2 -2
  184. mindspore/nn/loss/loss.py +19 -19
  185. mindspore/nn/optim/adasum.py +1 -1
  186. mindspore/nn/optim/sgd.py +2 -3
  187. mindspore/nn/probability/distribution/exponential.py +1 -1
  188. mindspore/nn/probability/distribution/geometric.py +1 -1
  189. mindspore/nn/probability/distribution/logistic.py +1 -1
  190. mindspore/nn/wrap/cell_wrapper.py +1 -25
  191. mindspore/nn/wrap/loss_scale.py +1 -24
  192. mindspore/numpy/array_ops.py +1 -5
  193. mindspore/numpy/dtypes.py +3 -3
  194. mindspore/numpy/math_ops.py +8 -8
  195. mindspore/ops/__init__.py +1 -1
  196. mindspore/ops/_grad_experimental/grad_comm_ops.py +16 -75
  197. mindspore/ops/_vmap/vmap_array_ops.py +0 -27
  198. mindspore/ops/_vmap/vmap_math_ops.py +1 -29
  199. mindspore/ops/_vmap/vmap_nn_ops.py +18 -19
  200. mindspore/ops/auto_generate/cpp_create_prim_instance_helper.py +8 -34
  201. mindspore/ops/auto_generate/gen_arg_dtype_cast.py +9 -2
  202. mindspore/ops/auto_generate/gen_arg_handler.py +0 -26
  203. mindspore/ops/auto_generate/gen_extend_func.py +27 -603
  204. mindspore/ops/auto_generate/gen_ops_def.py +203 -993
  205. mindspore/ops/auto_generate/gen_ops_prim.py +402 -1946
  206. mindspore/ops/auto_generate/pyboost_inner_prim.py +20 -90
  207. mindspore/ops/composite/base.py +6 -3
  208. mindspore/ops/composite/math_ops.py +1 -1
  209. mindspore/ops/composite/multitype_ops/_compile_utils.py +17 -24
  210. mindspore/ops/composite/multitype_ops/_constexpr_utils.py +1 -1
  211. mindspore/ops/extend/__init__.py +3 -2
  212. mindspore/ops/extend/array_func.py +51 -10
  213. mindspore/ops/extend/nn_func.py +78 -2
  214. mindspore/ops/function/__init__.py +13 -8
  215. mindspore/ops/function/array_func.py +179 -455
  216. mindspore/ops/function/clip_func.py +1 -1
  217. mindspore/ops/function/grad/grad_func.py +3 -3
  218. mindspore/ops/function/math_func.py +103 -117
  219. mindspore/ops/function/nn_func.py +163 -275
  220. mindspore/ops/function/other_func.py +2 -2
  221. mindspore/ops/function/random_func.py +69 -202
  222. mindspore/ops/function/sparse_func.py +4 -4
  223. mindspore/ops/functional.py +327 -332
  224. mindspore/ops/operations/__init__.py +3 -13
  225. mindspore/ops/operations/_grad_ops.py +27 -3
  226. mindspore/ops/operations/_inner_ops.py +356 -53
  227. mindspore/ops/operations/_rl_inner_ops.py +2 -2
  228. mindspore/ops/operations/_tensor_array.py +8 -8
  229. mindspore/ops/operations/array_ops.py +65 -82
  230. mindspore/ops/operations/comm_ops.py +93 -784
  231. mindspore/ops/operations/custom_ops.py +28 -51
  232. mindspore/ops/operations/debug_ops.py +4 -4
  233. mindspore/ops/operations/inner_ops.py +2 -2
  234. mindspore/ops/operations/manually_defined/ops_def.py +4 -304
  235. mindspore/ops/operations/math_ops.py +50 -3
  236. mindspore/ops/operations/nn_ops.py +247 -14
  237. mindspore/ops/operations/other_ops.py +3 -3
  238. mindspore/ops/operations/random_ops.py +1 -1
  239. mindspore/ops/operations/sparse_ops.py +1 -1
  240. mindspore/ops/primitive.py +8 -9
  241. mindspore/ops/silent_check.py +5 -5
  242. mindspore/ops_generate/arg_dtype_cast.py +9 -2
  243. mindspore/ops_generate/arg_handler.py +0 -26
  244. mindspore/ops_generate/gen_aclnn_implement.py +4 -1
  245. mindspore/ops_generate/gen_ops.py +4 -26
  246. mindspore/ops_generate/gen_pyboost_func.py +12 -41
  247. mindspore/ops_generate/gen_utils.py +0 -21
  248. mindspore/ops_generate/pyboost_utils.py +2 -7
  249. mindspore/ops_generate/template.py +0 -1
  250. mindspore/parallel/_auto_parallel_context.py +1 -21
  251. mindspore/parallel/_tensor.py +5 -0
  252. mindspore/parallel/_transformer/transformer.py +1 -1
  253. mindspore/parallel/_utils.py +1 -15
  254. mindspore/parallel/algo_parameter_config.py +3 -1
  255. mindspore/parallel/checkpoint_transform.py +9 -12
  256. mindspore/parallel/cluster/process_entity/_api.py +29 -28
  257. mindspore/parallel/cluster/process_entity/_utils.py +3 -13
  258. mindspore/parallel/cluster/run.py +16 -13
  259. mindspore/parallel/parameter_broadcast.py +2 -2
  260. mindspore/parallel/shard.py +17 -31
  261. mindspore/profiler/__init__.py +2 -3
  262. mindspore/profiler/common/util.py +2 -107
  263. mindspore/profiler/envprofiling.py +1 -1
  264. mindspore/profiler/parser/ascend_analysis/constant.py +21 -8
  265. mindspore/profiler/parser/ascend_analysis/file_manager.py +0 -82
  266. mindspore/profiler/parser/ascend_analysis/function_event.py +28 -43
  267. mindspore/profiler/parser/ascend_analysis/fwk_cann_parser.py +27 -49
  268. mindspore/profiler/parser/ascend_analysis/fwk_file_parser.py +10 -15
  269. mindspore/profiler/parser/ascend_analysis/msprof_timeline_parser.py +20 -25
  270. mindspore/profiler/parser/ascend_analysis/profiler_info_parser.py +5 -5
  271. mindspore/profiler/parser/ascend_analysis/trace_event_manager.py +1 -10
  272. mindspore/profiler/parser/ascend_hccl_generator.py +1 -4
  273. mindspore/profiler/parser/ascend_msprof_exporter.py +22 -43
  274. mindspore/profiler/parser/ascend_timeline_generator.py +5 -7
  275. mindspore/profiler/parser/minddata_parser.py +3 -72
  276. mindspore/profiler/profiling.py +59 -176
  277. mindspore/rewrite/api/node.py +1 -1
  278. mindspore/rewrite/common/namespace.py +5 -5
  279. mindspore/rewrite/parsers/assign_parser.py +0 -2
  280. mindspore/rewrite/parsers/class_def_parser.py +4 -8
  281. mindspore/run_check/_check_version.py +1 -1
  282. mindspore/scipy/fft.py +3 -1
  283. mindspore/scipy/linalg.py +3 -2
  284. mindspore/scipy/ops.py +3 -5
  285. mindspore/scipy/optimize/__init__.py +2 -2
  286. mindspore/train/__init__.py +4 -4
  287. mindspore/train/anf_ir_pb2.py +2 -8
  288. mindspore/train/callback/__init__.py +2 -5
  289. mindspore/train/callback/_backup_and_restore.py +2 -2
  290. mindspore/train/callback/_checkpoint.py +16 -104
  291. mindspore/train/callback/_landscape.py +1 -1
  292. mindspore/train/callback/_time_monitor.py +1 -1
  293. mindspore/train/data_sink.py +4 -5
  294. mindspore/train/dataset_helper.py +20 -45
  295. mindspore/train/model.py +38 -266
  296. mindspore/train/serialization.py +105 -256
  297. mindspore/train/summary/_summary_adapter.py +1 -1
  298. mindspore/version.py +1 -1
  299. {mindspore-2.3.0.dist-info → mindspore-2.3.0rc2.dist-info}/METADATA +2 -2
  300. {mindspore-2.3.0.dist-info → mindspore-2.3.0rc2.dist-info}/RECORD +303 -420
  301. mindspore/_extends/pijit/__init__.py +0 -23
  302. mindspore/_extends/pijit/pijit_func_white_list.py +0 -343
  303. mindspore/common/file_system.py +0 -48
  304. mindspore/common/generator.py +0 -260
  305. mindspore/common/no_inline.py +0 -54
  306. mindspore/common/np_dtype.py +0 -25
  307. mindspore/communication/comm_func.py +0 -1140
  308. mindspore/hal/memory.py +0 -326
  309. mindspore/lib/libavcodec.so.59 +0 -0
  310. mindspore/lib/libavdevice.so.59 +0 -0
  311. mindspore/lib/libavfilter.so.8 +0 -0
  312. mindspore/lib/libavformat.so.59 +0 -0
  313. mindspore/lib/libavutil.so.57 +0 -0
  314. mindspore/lib/libmindspore_np_dtype.so +0 -0
  315. mindspore/lib/libswresample.so.4 +0 -0
  316. mindspore/lib/libswscale.so.6 +0 -0
  317. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl/dynamic/all_finite.cpp +0 -326
  318. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/custom_ascendc_ops_impl/dynamic/all_finite.py +0 -180
  319. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_576ceaeef5870c451cab59af55ea46ad.json +0 -58
  320. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_576ceaeef5870c451cab59af55ea46ad.o +0 -0
  321. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_86a73ff6e28d734c96bb8d3054f7dd18.json +0 -58
  322. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_86a73ff6e28d734c96bb8d3054f7dd18.o +0 -0
  323. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_f55e0ebaad1f2f572e43677336992fa0.json +0 -58
  324. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/ascend910b/all_finite/AllFinite_f55e0ebaad1f2f572e43677336992fa0.o +0 -0
  325. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/config/ascend910b/all_finite.json +0 -109
  326. mindspore/lib/plugin/ascend/custom_ascendc_ops/op_impl/ai_core/tbe/kernel/config/ascend910b/binary_info_config.json +0 -38
  327. mindspore/lib/plugin/ascend/custom_compiler/OWNERS +0 -12
  328. mindspore/lib/plugin/ascend/custom_compiler/setup.py +0 -255
  329. mindspore/lib/plugin/ascend/custom_compiler/start.sh +0 -26
  330. mindspore/lib/plugin/ascend/custom_compiler/template.json +0 -40
  331. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/acme.h +0 -24
  332. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/acme_op.h +0 -69
  333. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/base_type.h +0 -133
  334. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/op_creator.h +0 -32
  335. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/op_param.h +0 -35
  336. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/include/tiling_info.h +0 -60
  337. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/core/kernel_register.h +0 -37
  338. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/core/platform/platform_configs.h +0 -89
  339. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/core/platform/rt_funcs.h +0 -135
  340. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/add_op.h +0 -34
  341. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/asd_backoff_base.h +0 -62
  342. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/asd_elewise_op.h +0 -33
  343. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/asd_ops.h +0 -88
  344. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/asd_pa_op.h +0 -45
  345. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/cast_op.h +0 -52
  346. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/ops/host_src/matmul_op.h +0 -95
  347. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/utils/asd_utils.h +0 -84
  348. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/acme/src/utils/comm_utils.h +0 -61
  349. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_fp32.h +0 -224
  350. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/and_impl.h +0 -29
  351. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/div_impl.h +0 -29
  352. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/elewise_binary_impl.h +0 -48
  353. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/elewise_binary_tiling.h +0 -25
  354. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/and_kernel.h +0 -46
  355. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/div_kernel.h +0 -46
  356. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/elewise_binary_base.h +0 -260
  357. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/elewise_binary_kernel.h +0 -35
  358. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/max_kernel.h +0 -66
  359. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/min_kernel.h +0 -66
  360. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/mul_kernel.h +0 -66
  361. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/kernel/or_kernel.h +0 -46
  362. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/max_impl.h +0 -29
  363. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/min_impl.h +0 -29
  364. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/mul_impl.h +0 -29
  365. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_binary/or_impl.h +0 -29
  366. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/abs_impl.h +0 -29
  367. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/elewise_unary_impl.h +0 -47
  368. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/elewise_unary_tiling.h +0 -24
  369. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/exp_impl.h +0 -29
  370. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/abs_kernel.h +0 -45
  371. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/elewise_unary_base.h +0 -148
  372. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/elewise_unary_kernel.h +0 -31
  373. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/exp_kernel.h +0 -45
  374. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/ln_kernel.h +0 -45
  375. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/not_kernel.h +0 -45
  376. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/reciprocal_kernel.h +0 -45
  377. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/relu_kernel.h +0 -55
  378. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/rsqrt_kernel.h +0 -45
  379. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/kernel/sqrt_kernel.h +0 -45
  380. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/ln_impl.h +0 -29
  381. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/not_impl.h +0 -29
  382. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/reciprocal_impl.h +0 -29
  383. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/relu_impl.h +0 -29
  384. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/rsqrt_impl.h +0 -29
  385. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/elewise_unary/sqrt_impl.h +0 -29
  386. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/grouped_matmul/grouped_matmul_impl.h +0 -45
  387. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/grouped_matmul/grouped_matmul_tiling.h +0 -187
  388. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/grouped_matmul/kernel/grouped_matmul.h +0 -245
  389. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/grouped_matmul/kernel/grouped_matmul_interface.h +0 -24
  390. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/grouped_matmul/kernel/grouped_matmul_utils.h +0 -111
  391. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/grouped_matmul/tiling_data.h +0 -54
  392. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/compare_param.h +0 -31
  393. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/elewise_param.h +0 -41
  394. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/grouped_matmul_param.h +0 -40
  395. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/profiling_util.h +0 -364
  396. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log_utils.h +0 -69
  397. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/register/kernel_creator.h +0 -39
  398. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/register/kernel_registry.h +0 -114
  399. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/utils.h +0 -98
  400. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MatMulPostFusionMixTactic/matmul_postfusion_mix.json +0 -19
  401. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MatMulPostFusionMixTactic/matmul_postfusion_mix.o +0 -0
  402. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MatMulPostFusionMixTactic/matmul_postfusion_mix_mix_aic_0.o +0 -0
  403. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MatMulPostFusionMixTactic/matmul_postfusion_mix_mix_aiv_0.o +0 -0
  404. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MultiMatMulPostFusionMixTactic/multi_matmul_postfusion_mix.json +0 -19
  405. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MultiMatMulPostFusionMixTactic/multi_matmul_postfusion_mix.o +0 -0
  406. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MultiMatMulPostFusionMixTactic/multi_matmul_postfusion_mix_mix_aic_0.o +0 -0
  407. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/MultiMatMulPostFusionMixTactic/multi_matmul_postfusion_mix_mix_aiv_0.o +0 -0
  408. mindspore/mint/linalg/__init__.py +0 -22
  409. mindspore/nn/layer/embedding_service.py +0 -531
  410. mindspore/nn/layer/embedding_service_layer.py +0 -393
  411. mindspore/ops/function/reshard_func.py +0 -102
  412. mindspore/ops/operations/_infer_ops.py +0 -19
  413. mindspore/ops/operations/reshard_ops.py +0 -53
  414. mindspore/profiler/common/process_pool.py +0 -41
  415. mindspore/profiler/common/singleton.py +0 -28
  416. mindspore/profiler/parser/ascend_integrate_generator.py +0 -42
  417. mindspore/profiler/parser/ascend_memory_generator.py +0 -185
  418. mindspore/train/callback/_cluster_monitor.py +0 -201
  419. mindspore/train/callback/_flops_collector.py +0 -238
  420. mindspore/train/callback/_mindio_ttp.py +0 -443
  421. {mindspore-2.3.0.dist-info → mindspore-2.3.0rc2.dist-info}/WHEEL +0 -0
  422. {mindspore-2.3.0.dist-info → mindspore-2.3.0rc2.dist-info}/entry_points.txt +0 -0
  423. {mindspore-2.3.0.dist-info → mindspore-2.3.0rc2.dist-info}/top_level.txt +0 -0
mindspore/nn/generator.py
@@ -0,0 +1,297 @@
+# Copyright 2024 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+"""Generator"""
+import os
+
+import numpy as np
+
+from mindspore import context
+from mindspore.common.parameter import Parameter
+from mindspore.nn.cell import Cell
+from mindspore.ops.operations import Assign, AssignAdd, Depend
+
+
+class Generator(Cell):
+    """
+    A generator that manages the state of random numbers and provides seed and offset for random functions.
+    When the seed and offset are fixed, the random function generates the same random sequence.
+
+    Inputs:
+        - **step** (int) - Set the step size for offset update.
+
+    Outputs:
+        Tuple consisting of the seed and offset of generator.
+
+    Supported Platforms:
+        ``Ascend`` ``GPU`` ``CPU``
+
+    Examples:
+        >>> import mindspore as ms
+        >>> from mindspore.nn import Generator
+        >>> import numpy as np
+        >>> np.random.seed(10)
+        >>> ms.set_context(mode=1)
+        >>> generator = Generator()
+        >>> print(generator.get_state())
+        (Tensor(shape=[], dtype=Int32, value= 0), Tensor(shape=[], dtype=Int32, value= 0))
+        >>> print(generator(12))
+        (0, 0)
+        >>> print(generator.get_state())
+        (Tensor(shape=[], dtype=Int32, value= 0), Tensor(shape=[], dtype=Int32, value= 12))
+        >>> generator.manual_seed(20)
+        >>> print(generator.get_state())
+        (Tensor(shape=[], dtype=Int32, value= 20), Tensor(shape=[], dtype=Int32, value= 0))
+        >>> print(generator.seed())
+        1165313289
+        >>> print(generator.initial_seed())
+        1165313289
+    """
+
+    def __init__(self):
+        super(Generator, self).__init__()
+        self._assign = Assign().set_device("CPU")
+        self._assign_add = AssignAdd().set_device("CPU")
+        self._depend = Depend()
+        self._seed = Parameter(0, name="seed", requires_grad=False)
+        self._offset = Parameter(0, name="offset", requires_grad=False)
+        self._seed_val = 0
+        self._offset_val = 0
+
+    def set_state(self, seed, offset=None):  # pylint: disable=redefined-outer-name
+        """
+        Sets the generator state.
+
+        Args:
+            seed (int): Seed of the generator.
+            offset (int, optional): Offset of the generator, default: ``None`` , means ``0``.
+        """
+        self._seed_val = int(seed)
+        self._assign(self._seed, self._seed_val)
+        if offset is None:
+            offset = 0
+        self._offset_val = int(offset)
+        self._assign(self._offset, self._offset_val)
+
+    def get_state(self):
+        """
+        Get the generator state.
+
+        Returns:
+            Tuple consisting of the seed and offset of generator.
+        """
+        return self._seed.value(), self._offset.value()
+
+    def seed(self):  # pylint: disable=redefined-outer-name
+        """
+        Generate random seeds that can be used as seeds for generator.
+
+        Returns:
+            Tensor, randomly generated seeds.
+        """
+        seed_ = np.random.randint(np.iinfo(np.int32).min, np.iinfo(np.int32).max)
+        self.set_state(seed_)
+        return self._seed.value()
+
+    def manual_seed(self, seed):  # pylint: disable=redefined-outer-name
+        """
+        Sets the generator seed.
+
+        Args:
+            seed (int): Sets the generator seed.
+
+        Returns:
+            The generator self.
+        """
+        self.set_state(seed)
+        return self
+
+    def initial_seed(self):
+        """
+        Return the initial seed of generator.
+
+        Returns:
+            The initial seed of generator.
+        """
+        return self._seed.value()
+
+    def construct(self, step):
+        """
+        Update the value of offset, and return the seed and the previous offset.
+
+        Args:
+            step (int): Update offset by step.
+
+        Returns:
+            Seed and offset before update.
+        """
+        offset = self._offset.value()
+        step = self._depend(step, offset)
+        self._assign_add(self._offset, step)
+        return self._seed.value(), offset
+
+    def __call__(self, step):
+        if os.getenv("MS_JIT") != '0' and context.get_context("mode") == context.GRAPH_MODE:
+            return super().__call__(step)
+
+        offset_val = self._offset_val
+        self._offset_val += step
+        self._offset.set_data(self._offset_val)
+        return self._seed_val, offset_val
+
+
+default_generator_ = None
+
+
+def _init_default_generator():
+    global default_generator_
+    default_generator_ = Generator()
+    default_generator_.seed()
+
+
+def default_generator():
+    """
+    Return the default generator object.
+
+    When the user does not specify generator, the random operator invokes default generator to generate random numbers.
+
+    Returns:
+        The default generator.
+
+    Supported Platforms:
+        ``Ascend`` ``GPU`` ``CPU``
+
+    Examples:
+        >>> from mindspore.nn import default_generator
+        >>> default_gen = default_generator()
+        >>> print(type(default_gen))
+        <class 'mindspore.nn.generator.Generator'>
+    """
+    if default_generator_ is None:
+        _init_default_generator()
+    return default_generator_
+
+
+def seed():  # pylint: disable=redefined-outer-name
+    """
+    Generate random seeds that can be used as seeds for default generator.
+
+    Returns:
+        Randomly generated seeds.
+
+    Supported Platforms:
+        ``Ascend`` ``GPU`` ``CPU``
+
+    Examples:
+        >>> import numpy as np
+        >>> from mindspore.nn import seed
+        >>> np.random.seed(20)
+        >>> print(seed())
+        1663920602
+    """
+    if default_generator_ is None:
+        _init_default_generator()
+    return default_generator_.seed()
+
+
+def manual_seed(seed):  # pylint: disable=redefined-outer-name
+    """
+    Sets the default generator seed.
+
+    Args:
+        seed (int): Sets the default generator seed.
+
+    Returns:
+        The default generator self.
+
+    Supported Platforms:
+        ``Ascend`` ``GPU`` ``CPU``
+
+    Examples:
+        >>> from mindspore.nn import manual_seed, initial_seed
+        >>> manual_seed(13)
+        >>> print(initial_seed())
+        13
+    """
+    if default_generator_ is None:
+        _init_default_generator()
+    default_generator_.manual_seed(seed)
+
+
+def initial_seed():
+    """
+    Return the initial seed of default generator.
+
+    Returns:
+        The initial seed of default generator.
+
+    Supported Platforms:
+        ``Ascend`` ``GPU`` ``CPU``
+
+    Examples:
+        >>> from mindspore.nn import manual_seed, initial_seed
+        >>> manual_seed(14)
+        >>> print(initial_seed())
+        14
+    """
+    if default_generator_ is None:
+        _init_default_generator()
+    return default_generator_.initial_seed()
+
+
+def get_rng_state():
+    """
+    Get the default generator state.
+
+    Returns:
+        Tuple consisting of the seed and offset of default generator.
+
+    Supported Platforms:
+        ``Ascend`` ``GPU`` ``CPU``
+
+    Examples:
+        >>> import numpy as np
+        >>> from mindspore.nn import get_rng_state
+        >>> np.random.seed(20)
+        >>> print(get_rng_state())
+        (Tensor(shape=[], dtype=Int32, value= 378518883), Tensor(shape=[], dtype=Int32, value= 0))
+    """
+    if default_generator_ is None:
+        _init_default_generator()
+    return default_generator_.get_state()
+
+
+def set_rng_state(seed, offset=None):  # pylint: disable=redefined-outer-name
+    """
+    Sets the default generator state.
+
+    Args:
+        seed (int): Seed of the default generator.
+        offset (int, optional): Offset of the default generator, default: ``None`` , means ``0``.
+
+    Supported Platforms:
+        ``Ascend`` ``GPU`` ``CPU``
+
+    Examples:
+        >>> from mindspore.nn import set_rng_state, get_rng_state
+        >>> set_rng_state(10)
+        >>> print(get_rng_state())
+        (Tensor(shape=[], dtype=Int32, value= 10), Tensor(shape=[], dtype=Int32, value= 0))
+    """
+    if default_generator_ is None:
+        _init_default_generator()
+    default_generator_.set_state(seed, offset)
+
+
+__all__ = ["Generator", "default_generator", "seed", "manual_seed", "initial_seed", "set_rng_state", "get_rng_state"]
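
The docstrings above show the intended usage. For quick reference, a minimal sketch of this API, assuming these names are exported from mindspore.nn as the examples above import them; layers such as DropoutExt (see the basic.py hunks below) fetch default_generator() once and then call the generator per forward step to obtain a fresh (seed, offset) pair.

import mindspore as ms
from mindspore.nn import Generator, manual_seed, get_rng_state

ms.set_context(mode=ms.PYNATIVE_MODE)

gen = Generator()        # starts with seed=0, offset=0
gen.manual_seed(20)      # fix the seed; offset resets to 0
seed, offset = gen(12)   # returns the state before the call, then advances offset by 12
print(seed, offset)      # 20 0
print(gen.get_state())   # seed stays 20, offset is now 12 (scalar int32 Tensors)

manual_seed(5)           # module-level helpers drive the process-wide default generator
print(get_rng_state())   # (seed=5, offset=0)
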
mindspore/nn/layer/activation.py
@@ -351,17 +351,16 @@ class ELU(Cell):
 
     Args:
         alpha (float): The alpha value of ELU, the data type is float. Default: ``1.0`` .
-            Only alpha equal to ``1.0`` is supported currently.
 
     Inputs:
-        - **input_x** (Tensor) - The input of ELU is a Tensor of any dimension with data type of float16 or float32.
+        - **x** (Tensor) - The input of ELU is a Tensor of any dimension with data type of float16 or float32.
 
     Outputs:
-        Tensor, with the same type and shape as the `input_x`.
+        Tensor, with the same type and shape as the `x`.
 
     Raises:
         TypeError: If `alpha` is not a float.
-        TypeError: If dtype of `input_x` is neither float16 nor float32.
+        TypeError: If dtype of `x` is neither float16 nor float32.
         ValueError: If `alpha` is not equal to 1.0.
 
     Supported Platforms:
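
A minimal usage sketch of the interface documented above (values are illustrative; only alpha=1.0 is accepted, per the Raises list).

import numpy as np
import mindspore as ms
from mindspore import Tensor, nn

elu = nn.ELU(alpha=1.0)                            # any other alpha raises ValueError
x = Tensor(np.array([-1.0, 0.0, 1.0]), ms.float32)
print(elu(x))                                      # approx. [-0.6321  0.  1.]
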
mindspore/nn/layer/basic.py
@@ -28,7 +28,6 @@ from mindspore.common.initializer import initializer, HeUniform, Uniform
 from mindspore.ops import operations as P
 from mindspore.ops import functional as F
 from mindspore.ops.function.nn_func import interpolate_ext
-from mindspore.ops.auto_generate import unfold_ext
 from mindspore.ops.operations import _inner_ops as inner
 from mindspore.ops.primitive import constexpr, Primitive, _primexpr
 from mindspore.common.parameter import Parameter
@@ -37,8 +36,8 @@ from mindspore import _checkparam as Validator
 from mindspore.nn.cell import Cell
 from mindspore.nn.layer.activation import get_activation
 from mindspore.common._decorator import deprecated
-from mindspore.ops.auto_generate import dropout_ext_op, fold_ext
-from mindspore.common.generator import default_generator
+from mindspore.ops.auto_generate import dropout_ext_op
+from mindspore.nn.generator import default_generator
 
 __all__ = ['Dropout', 'Flatten', 'Dense', 'ClipByNorm', 'Norm', 'OneHot', 'Pad', 'Unfold', 'Tril', 'Triu',
            'MatrixDiag', 'MatrixDiagPart', 'MatrixSetDiag', 'L1Regularizer', 'Dropout1d',
@@ -222,20 +221,20 @@ class DropoutExt(Cell):
 
     Note:
         - Each channel will be zeroed out independently on every construct call.
-        - Parameter `p` means the probability of the element of the input tensor to be zeroed.
+        Parameter `p` means the probability of the element of the input tensor to be zeroed.
 
     Args:
-        p (float): The dropout rate of input neurons, E.g. `p` =0.9, dropping out 90% of input neurons.
-            Default: ``0.5`` .
+        p (float): The dropout rate, greater than or equal to 0 and less than 1.
+            E.g. rate=0.9, dropping out 90% of input neurons. Default: ``0.5`` .
 
     Inputs:
-        - **x** (Tensor) - The input of Dropout.
+        - **x** (Tensor) - The input of Dropout with data type of float16 or float32.
 
     Outputs:
         Tensor, output tensor with the same shape as the `x`.
 
     Raises:
-        TypeError: If the dtype of `p` is not float.
+        ValueError: If `p` is not in range [0, 1).
         ValueError: If length of shape of `x` is less than 1.
 
     Supported Platforms:
@@ -256,15 +255,16 @@ class DropoutExt(Cell):
     def __init__(self, p=0.5):
         """Initialize DropoutExt."""
         super(DropoutExt, self).__init__()
+        self.generator = default_generator()
+        self.dropout = dropout_ext_op
         self.p = p
-        self.generator_step = Tensor(1, mstype.int64)
 
     def construct(self, x):
         if not self.training or self.p == 0:
             return x
 
-        seed, offset = default_generator._step(self.generator_step)  # pylint: disable=protected-access
-        out, _ = dropout_ext_op(x, self.p, seed, offset)
+        seed, offset = self.generator(1)
+        out, _ = self.dropout(x, self.p, seed, offset)
         return out
 
 
@@ -477,16 +477,16 @@ class Upsample(Cell):
 
 class UpsampleExt(Cell):
     r"""
-    For details, please refer to :func:`mindspore.mint.nn.functional.interpolate`.
+    For details, please refer to :func:`mindspore.mint.interpolate`.
 
     Supported Platforms:
-        ``Ascend``
+        ``Ascend`` ``GPU`` ``CPU``
 
     Examples:
         >>> import mindspore as ms
-        >>> from mindspore import nn
+        >>> from mindspore import mint
         >>> x = ms.Tensor([[[[1.0, 2.0, 3.0, 4.0], [5.0, 6.0, 7.0, 8.0]]]])
-        >>> upsample = nn.UpsampleExt(size=(5, 5))
+        >>> upsample = mint.Upsample(size=(5, 5))
         >>> out = upsample(x)
         >>> print(x.asnumpy())
         [[[[1. 2. 3. 4.]
@@ -535,8 +535,7 @@ class Flatten(Cell):
         TypeError: If `x` is not a Tensor.
         TypeError: If `start_dim` or `end_dim` is not int.
         ValueError: If `start_dim` is greater than `end_dim` after canonicalized.
-        ValueError: If `start_dim` or `end_dim` is not in range of [-x.dim, x.dim-1]. For example, the default values
-            are used for the args and the input is a 0-dimensional or 1-dimensional Tensor.
+        ValueError: If `start_dim` or `end_dim` is not in range of [-x.dim, x.dim-1].
 
     Supported Platforms:
         ``Ascend`` ``GPU`` ``CPU``
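
A short sketch of the start_dim/end_dim behaviour referenced by the Raises entries above, assuming the nn.Flatten(start_dim, end_dim) signature those entries describe.

import numpy as np
import mindspore as ms
from mindspore import Tensor, nn

x = Tensor(np.ones((2, 3, 4, 5)), ms.float32)
print(nn.Flatten()(x).shape)                         # (2, 60): defaults start_dim=1, end_dim=-1
print(nn.Flatten(start_dim=2, end_dim=3)(x).shape)   # (2, 3, 20)
# start_dim/end_dim outside [-x.dim, x.dim-1] raise the ValueError listed above
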
@@ -1131,68 +1130,6 @@ class Unfold(Cell):
         return result
 
 
-class UnfoldExt(Cell):
-    r"""
-    Extracts sliding local blocks from a batched input tensor.
-
-    For details, please refer to :func:`mindspore.mint.nn.functional.unfold`.
-
-    Supported Platforms:
-        ``Ascend``
-
-    Examples:
-        >>> import mindspore
-        >>> import numpy as np
-        >>> from mindspore import Tensor, nn
-        >>> input = Tensor(np.random.rand(4, 4, 32, 32), mindspore.float64)
-        >>> unfold = nn.UnfoldExt(kernel_size=3, dilation=1, stride=1)
-        >>> output = unfold(input)
-        >>> print(output.shape)
-        (4, 36, 900)
-    """
-    def __init__(self, kernel_size, dilation=1, padding=0, stride=1):
-        super(UnfoldExt, self).__init__()
-        self.kernel_size = kernel_size
-        self.dilation = dilation
-        self.padding = padding
-        self.stride = stride
-
-    def construct(self, input):
-        return unfold_ext(input, self.kernel_size, self.dilation, self.padding, self.stride)
-
-
-class Fold(Cell):
-    r"""
-    Combines an array of sliding local blocks into a large containing tensor.
-
-    For details, please refer to :func:`mindspore.mint.nn.functional.fold`.
-
-    Supported Platforms:
-        ``Ascend``
-
-    Examples:
-        >>> import numpy as np
-        >>> from mindspore import Tensor, nn
-        >>> from mindspore import dtype as mstype
-        >>> fold = nn.Fold([8, 8], [2, 2], [2, 2], [2, 2], [2, 2])
-        >>> input = Tensor(input_data=np.random.rand(16, 64, 25), dtype=mstype.float32)
-        >>> output = fold(input)
-        >>> print(output.shape)
-        (16, 16, 8, 8)
-    """
-    def __init__(self, output_size, kernel_size, dilation=1, padding=0, stride=1):
-        super(Fold, self).__init__()
-        self.output_size = output_size
-        self.kernel_size = kernel_size
-        self.dilation = dilation
-        self.padding = padding
-        self.stride = stride
-
-    def construct(self, input):
-        return fold_ext(input, self.output_size, self.kernel_size,
-                        self.dilation, self.padding, self.stride)
-
-
 @_primexpr
 def tril(x_shape, x_dtype, k):
     Validator.check_int(len(x_shape), 1, Validator.GE, "x rank", "tril")
mindspore/nn/layer/conv.py
@@ -16,7 +16,6 @@
 from __future__ import absolute_import
 
 import math
-import numpy as np
 
 from mindspore import context
 from mindspore.ops import operations as P
@@ -542,14 +541,10 @@ class Conv1d(_Conv):
         stride = (1, stride)
         dilation = (1, dilation)
         get_shape = P.Shape()
-        get_dtype = P.DType()
         if isinstance(weight_init, Tensor):
             weight_init_shape = get_shape(weight_init)
             Validator.check_equal_int(len(weight_init_shape), 3, 'weight_init_shape', self.cls_name)
-            weight_init_dtype = get_dtype(weight_init)
-            weight_init_value = weight_init.asnumpy()
-            weight_init_value = np.expand_dims(weight_init_value, 2)
-            weight_init = Tensor(weight_init_value, weight_init_dtype)
+            weight_init = weight_init.expand_dims(2)
 
         super(Conv1d, self).__init__(
             in_channels,
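
Both sides of this hunk insert a dummy height axis into a 3-D Conv1d weight_init tensor; the rc2 line does it directly on the Tensor, while 2.3.0 round-trips through NumPy and restores the dtype explicitly. A rough equivalence sketch, assuming float32 weights:

import numpy as np
import mindspore as ms
from mindspore import Tensor

w = Tensor(np.ones((8, 4, 3)), ms.float32)        # (out_channels, in_channels, kernel_size)

a = w.expand_dims(2)                               # rc2 path: shape (8, 4, 1, 3)
b = Tensor(np.expand_dims(w.asnumpy(), 2), w.dtype)  # 2.3.0 path: same shape, dtype preserved

print(a.shape, b.shape)                            # (8, 4, 1, 3) (8, 4, 1, 3)
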
@@ -708,7 +703,7 @@ class Conv3d(_Conv):
 
     Inputs:
         - **x** (Tensor) - Tensor of shape :math:`(N, C_{in}, D_{in}, H_{in}, W_{in})`.
-          Currently, input data type support float16 and float32 in CPU and GPU, and only float16 in Ascend.
+          Currently input data type only support float16 and float32.
 
     Outputs:
         Tensor of shape is :math:`(N, C_{out}, D_{out}, H_{out}, W_{out})`.
@@ -908,11 +903,11 @@ class Conv3dTranspose(_Conv):
         group (int): Splits filter into groups, `in_channels` and `out_channels` must be
             divisible by `group`. Default: ``1`` .
         output_padding (Union(int, tuple[int])): The number of padding on the depth, height and width directions of
-            the output. The data type is an integer or a tuple of three integers. If `output_padding` is an integer,
-            then the depth, height, and width dimension padding are all equal to `output_padding`.
-            If `output_padding` is a tuple of three integers, then the depth, height, and width padding is equal to
-            `output_padding[0]`, `output_padding[1]` and `output_padding[2]` respectively.
-            The value should be greater than or equal to 0.
+            the output. The data type is an integer or a tuple of six integers. If `output_padding` is an integer,
+            then the head, tail, top, bottom, left, and right padding are all equal to `output_padding`.
+            If `output_padding` is a tuple of six integers, then the head, tail, top, bottom, left, and right padding
+            is equal to `output_padding[0]`, `output_padding[1]`, `output_padding[2]`, `output_padding[3]`,
+            `output_padding[4]` and `output_padding[5]` respectively. The value should be greater than or equal to 0.
             Default: ``0`` .
         has_bias (bool): Whether the Conv3dTranspose layer has a bias parameter. Default: ``False`` .
         weight_init (Union[Tensor, str, Initializer, numbers.Number]): Initialization method of weight parameter.
@@ -1437,14 +1432,10 @@ class Conv1dTranspose(_Conv):
         stride = (1, stride)
         dilation = (1, dilation)
         get_shape = P.Shape()
-        get_dtype = P.DType()
         if isinstance(weight_init, Tensor):
             weight_init_shape = get_shape(weight_init)
             Validator.check_equal_int(len(weight_init_shape), 3, 'weight_init_shape', self.cls_name)
-            weight_init_dtype = get_dtype(weight_init)
-            weight_init_value = weight_init.asnumpy()
-            weight_init_value = np.expand_dims(weight_init_value, 2)
-            weight_init = Tensor(weight_init_value, weight_init_dtype)
+            weight_init = weight_init.expand_dims(2)
         # out_channels and in_channels swap.
         # cause Conv2DBackpropInput's out_channel refers to Conv2D's out_channel,
         # then Conv1dTranspose's out_channel refers to Conv2DBackpropInput's in_channel.
mindspore/nn/layer/embedding.py
@@ -124,7 +124,10 @@ class Embedding(Cell):
             if isinstance(self.init_tensor, Tensor) and self.init_tensor.init is not None:
                 self.init_tensor = self.init_tensor.init_data()
             init_tensor_type = self.init_tensor.dtype
-            self.init_tensor = self.init_tensor.asnumpy()
+            if init_tensor_type == mstype.bfloat16:
+                self.init_tensor = self.init_tensor.float().asnumpy()
+            else:
+                self.init_tensor = self.init_tensor.asnumpy()
             self.init_tensor[self.padding_idx] = 0
             self.init_tensor = Tensor(self.init_tensor, init_tensor_type)
         self.embedding_table = Parameter(
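
The added branch exists because NumPy has no bfloat16 dtype: a bfloat16 init tensor is widened to float32 before asnumpy(), and the original dtype is restored when the Tensor is rebuilt. A rough sketch of that round-trip, assuming a build where host-side bfloat16 tensors are supported:

import numpy as np
import mindspore as ms
from mindspore import Tensor

init_tensor = Tensor(np.ones((5, 4), np.float32)).astype(ms.bfloat16)  # e.g. an embedding table init
init_dtype = init_tensor.dtype

arr = init_tensor.float().asnumpy()    # widen to float32 first; NumPy cannot hold bfloat16
arr[0] = 0                             # zero the padding_idx row, as Embedding.__init__ does
init_tensor = Tensor(arr, init_dtype)  # back to bfloat16
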
mindspore/nn/layer/math.py
@@ -136,7 +136,7 @@ class Range(Cell):
         if delta == 0:
             raise ValueError(f"For '{self.cls_name}', the 'delta' can not be zero.")
         data = np.arange(start, limit, delta)
-        if data.dtype == np.float_:
+        if data.dtype == np.float:
             self.ms_dtype = mstype.float32
         else:
             self.ms_dtype = mstype.int32
mindspore/nn/layer/normalization.py
@@ -1152,7 +1152,7 @@ class GroupNorm(Cell):
 
     def _cal_output(self, x):
        """calculate groupnorm output"""
-        return group_norm(x, self.num_groups, self.gamma.to(x.dtype), self.beta.to(x.dtype), self.eps)
+        return group_norm(x, self.num_groups, self.gamma, self.beta, self.eps)
 
     @staticmethod
     @_primexpr
mindspore/nn/layer/pooling.py
@@ -27,7 +27,6 @@ from mindspore.common import dtype as mstype
 from mindspore.ops.operations.nn_ops import AdaptiveMaxPool2D
 from mindspore.ops.operations.nn_ops import AdaptiveMaxPool3D, AdaptiveAvgPool3D
 from mindspore.nn.cell import Cell
-from mindspore._c_expression import MSContext
 
 __all__ = ['AvgPool3d', 'MaxPool3d', 'AvgPool2d', 'MaxPool2d', 'AvgPool1d', 'MaxPool1d', 'FractionalMaxPool2d',
            'FractionalMaxPool3d', 'AdaptiveAvgPool1d', 'AdaptiveMaxPool1d', 'AdaptiveMaxPool2d', 'AdaptiveMaxPool3d',
@@ -1015,12 +1014,8 @@ class AvgPool2d(_PoolNd):
                  data_format="NCHW"):
         """Initialize AvgPool2d."""
         super(AvgPool2d, self).__init__(kernel_size, stride, pad_mode, data_format)
-        self.ascend_910bc_target = (MSContext.get_instance().get_ascend_soc_version() in ['ascend910b', 'ascend910c'])
         if pad_mode.upper() == 'PAD' or padding != 0 or ceil_mode or not count_include_pad \
                 or divisor_override is not None:
-            if self.ascend_910bc_target:
-                raise ValueError(f"For '{self.cls_name}, the pad_mod 'PAD' is not support in 910B now, "
-                                 f"it will be supported in the future.")
             if self.format == "NHWC":
                 raise ValueError(f"For '{self.cls_name}, the 'NHWC' format are not support when 'pad_mode' is 'pad' or "
                                  f"'padding' is not 0 or 'ceil_mode' is not False or 'count_include_pad' is not True"
mindspore/nn/layer/rnn_cells.py
@@ -178,7 +178,7 @@ class RNNCell(RNNCellBase):
     Args:
         input_size (int): Number of features of input.
         hidden_size (int): Number of features of hidden layer.
-        has_bias (bool): Whether the cell has bias :math:`b_{ih}` and :math:`b_{hh}`. Default: ``True`` .
+        has_bias (bool): Whether the cell has bias :math:`b_ih` and :math:`b_hh`. Default: ``True`` .
         nonlinearity (str): The non-linearity to use. Can be either ``"tanh"`` or ``"relu"`` .
             Default: ``"tanh"`` .
         dtype (:class:`mindspore.dtype`): Dtype of Parameters. Default: ``mstype.float32`` .
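
A minimal sketch exercising the arguments documented above (shapes are illustrative):

import numpy as np
import mindspore as ms
from mindspore import Tensor, nn

net = nn.RNNCell(input_size=10, hidden_size=16, has_bias=True, nonlinearity="tanh")
x = Tensor(np.ones([3, 10]), ms.float32)     # (batch, input_size)
hx = Tensor(np.zeros([3, 16]), ms.float32)   # (batch, hidden_size)
h_next = net(x, hx)
print(h_next.shape)                          # (3, 16)
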
@@ -263,7 +263,7 @@ class LSTMCell(RNNCellBase):
     Args:
         input_size (int): Number of features of input.
         hidden_size (int): Number of features of hidden layer.
-        has_bias (bool): Whether the cell has bias `b_{ih}` and `b_{hh}`. Default: ``True`` .
+        has_bias (bool): Whether the cell has bias `b_ih` and `b_hh`. Default: ``True`` .
         dtype (:class:`mindspore.dtype`): Dtype of Parameters. Default: ``mstype.float32`` .
 
     Inputs: