mindspore 2.3.0rc1-cp37-none-any.whl → 2.3.0rc2-cp37-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of mindspore has been flagged as potentially problematic.
Files changed (316)
  1. mindspore/.commit_id +1 -1
  2. mindspore/__init__.py +1 -1
  3. mindspore/_akg/akg/utils/tbe_codegen_utils.py +13 -3
  4. mindspore/_c_dataengine.cpython-37m-aarch64-linux-gnu.so +0 -0
  5. mindspore/_c_expression.cpython-37m-aarch64-linux-gnu.so +0 -0
  6. mindspore/_checkparam.py +20 -0
  7. mindspore/_extends/parse/parser.py +1 -1
  8. mindspore/_extends/parse/standard_method.py +6 -5
  9. mindspore/_mindspore_offline_debug.cpython-37m-aarch64-linux-gnu.so +0 -0
  10. mindspore/amp.py +5 -5
  11. mindspore/bin/cache_admin +0 -0
  12. mindspore/bin/cache_server +0 -0
  13. mindspore/boost/boost_cell_wrapper.py +1 -1
  14. mindspore/boost/group_loss_scale_manager.py +1 -1
  15. mindspore/common/__init__.py +4 -2
  16. mindspore/common/_register_for_recompute.py +48 -0
  17. mindspore/common/_stub_tensor.py +1 -0
  18. mindspore/common/api.py +56 -4
  19. mindspore/common/dtype.py +5 -3
  20. mindspore/common/dump.py +2 -2
  21. mindspore/common/hook_handle.py +51 -4
  22. mindspore/common/initializer.py +1 -1
  23. mindspore/common/jit_config.py +17 -6
  24. mindspore/common/parameter.py +7 -2
  25. mindspore/common/recompute.py +247 -0
  26. mindspore/common/sparse_tensor.py +2 -2
  27. mindspore/common/symbol.py +1 -1
  28. mindspore/common/tensor.py +74 -36
  29. mindspore/communication/__init__.py +3 -3
  30. mindspore/communication/management.py +30 -30
  31. mindspore/context.py +28 -15
  32. mindspore/dataset/__init__.py +5 -5
  33. mindspore/dataset/audio/__init__.py +2 -2
  34. mindspore/dataset/audio/transforms.py +51 -51
  35. mindspore/dataset/callback/ds_callback.py +2 -2
  36. mindspore/dataset/engine/cache_client.py +1 -1
  37. mindspore/dataset/engine/datasets.py +3 -3
  38. mindspore/dataset/engine/datasets_audio.py +14 -14
  39. mindspore/dataset/engine/datasets_standard_format.py +3 -3
  40. mindspore/dataset/engine/datasets_text.py +38 -38
  41. mindspore/dataset/engine/datasets_user_defined.py +3 -3
  42. mindspore/dataset/engine/datasets_vision.py +68 -68
  43. mindspore/dataset/text/__init__.py +3 -3
  44. mindspore/dataset/text/transforms.py +26 -26
  45. mindspore/dataset/transforms/__init__.py +1 -1
  46. mindspore/dataset/vision/__init__.py +3 -3
  47. mindspore/dataset/vision/transforms.py +92 -92
  48. mindspore/dataset/vision/utils.py +1 -1
  49. mindspore/experimental/optim/adadelta.py +2 -2
  50. mindspore/experimental/optim/adagrad.py +2 -2
  51. mindspore/experimental/optim/adam.py +2 -2
  52. mindspore/experimental/optim/adamax.py +2 -2
  53. mindspore/experimental/optim/adamw.py +2 -2
  54. mindspore/experimental/optim/asgd.py +2 -2
  55. mindspore/experimental/optim/lr_scheduler.py +24 -20
  56. mindspore/experimental/optim/nadam.py +2 -2
  57. mindspore/experimental/optim/optimizer.py +1 -1
  58. mindspore/experimental/optim/radam.py +2 -2
  59. mindspore/experimental/optim/rmsprop.py +2 -2
  60. mindspore/experimental/optim/rprop.py +2 -2
  61. mindspore/experimental/optim/sgd.py +2 -2
  62. mindspore/hal/stream.py +2 -0
  63. mindspore/include/mindapi/base/types.h +5 -0
  64. mindspore/lib/libdnnl.so.2 +0 -0
  65. mindspore/lib/libmindspore.so +0 -0
  66. mindspore/lib/libmindspore_backend.so +0 -0
  67. mindspore/lib/libmindspore_common.so +0 -0
  68. mindspore/lib/libmindspore_core.so +0 -0
  69. mindspore/lib/libmindspore_gpr.so.15 +0 -0
  70. mindspore/lib/libmindspore_grpc++.so.1 +0 -0
  71. mindspore/lib/libmindspore_grpc.so.15 +0 -0
  72. mindspore/lib/libmindspore_shared_lib.so +0 -0
  73. mindspore/lib/libopencv_core.so.4.5 +0 -0
  74. mindspore/lib/plugin/ascend/custom_aicpu_ops/op_impl/cpu/aicpu_kernel/impl/libcust_cpu_kernels.so +0 -0
  75. mindspore/lib/plugin/ascend/custom_aicpu_ops/op_impl/cpu/config/cust_aicpu_kernel.json +6 -6
  76. mindspore/lib/plugin/ascend/custom_aicpu_ops/op_proto/libcust_op_proto.so +0 -0
  77. mindspore/lib/plugin/ascend/libdvpp_utils.so +0 -0
  78. mindspore/lib/plugin/ascend/liblowlatency_collective.so +0 -0
  79. mindspore/lib/plugin/ascend/libmindspore_cpu_kernels.so +0 -0
  80. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/bin/DeviceBin +0 -0
  81. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/bin/PkgInspect +0 -0
  82. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/bin/op_man +0 -0
  83. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/device/ascend910b/bin/ascend910b.bin +101787 -98559
  84. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/host/libasdops_cann_host.so +0 -0
  85. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/host/libasdops_host.so +0 -0
  86. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/include/asdops/base/op_register.h +2 -2
  87. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/include/asdops/params/mix.h +8 -1
  88. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/include/asdops/params/norm.h +5 -3
  89. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/include/asdops/params/reduce.h +2 -2
  90. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/include/asdops/utils/rt/backend/backend.h +3 -3
  91. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/include/asdops/utils/rt/backend/rtbackend.h +3 -3
  92. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/include/asdops/utils/rt/base/types.h +0 -1
  93. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/include/asdops/utils/rt/module/module.h +3 -3
  94. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/include/asdops/utils/svector/svector.h +3 -2
  95. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/lib/libasdops.so +0 -0
  96. mindspore/lib/plugin/ascend/ms_kernels_internal/asdops/lib/libasdops_static.a +0 -0
  97. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/add/tiling/add_tiling.h +9 -9
  98. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/apply_rotary_pos_emb_impl.h +2 -6
  99. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb.h +2 -2
  100. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_base.h +460 -0
  101. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_bf16.h +217 -0
  102. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_fp16.h +116 -0
  103. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_tiling.h +16 -24
  104. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/apply_rotary_pos_emb/kernel/apply_rotary_pos_emb_value.h +27 -0
  105. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/asdop/asd_op_impl.h +0 -4
  106. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/{attention/FlashAttentionScore_impl.h → flash_attention_score/flash_attention_score_impl.h} +2 -1
  107. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/{attention/bs_attention_tiling.h → flash_attention_score/flash_attention_score_tiling.h} +15 -19
  108. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/gelu/tiling/gelu_tiling.h +7 -9
  109. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/lccl/lccl_wrapper.h +58 -0
  110. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul/matmul_impl.h +19 -8
  111. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/{matmul → matmul_common}/pp_matmul_common_tiling.h +18 -8
  112. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/{matmul → matmul_common}/pp_matmul_info.h +7 -4
  113. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/{matmul → matmul_common}/tiling_data.h +44 -6
  114. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul_common/tiling_utils.h +65 -0
  115. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul_stridedslice/matmul_stridedslice_fusion_impl.h +10 -6
  116. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/op_param.h +4 -1
  117. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/paged_attention/kernel/paged_attention_mix_hwsync.h +41 -0
  118. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/{attention/PagedAttention_impl.h → paged_attention/paged_attention_impl.h} +1 -1
  119. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/paged_attention/paged_attention_tiling.h +63 -0
  120. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/add_param.h +2 -2
  121. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/{attention_param.h → param/attention_param.h} +11 -2
  122. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/matmul_ext_param.h +37 -0
  123. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/param/sub_param.h +45 -0
  124. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/reshape_and_cache/reshape_and_cache_tiling.h +1 -2
  125. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/rms_norm/kernel/rms_norm.h +23 -0
  126. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/rms_norm/kernel/rms_norm_base.h +175 -0
  127. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/rms_norm/kernel/rms_norm_normal.h +276 -0
  128. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/rms_norm/kernel/rms_norm_split_d.h +280 -0
  129. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/rms_norm/kernel/tiling_data.h +35 -0
  130. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/rms_norm/rms_norm_impl.h +45 -0
  131. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/sub/kernel/sub_kernel.h +20 -0
  132. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/sub/sub_impl.h +47 -0
  133. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/sub/sub_tiling.h +25 -0
  134. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/tune_repo/matmul_table.h +323 -23
  135. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/types.h +15 -4
  136. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/utils/log/log_tiling.h +8 -0
  137. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libAdd_impl.so +0 -0
  138. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libSub_impl.so +0 -0
  139. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libadd_layernorm_impl.so +0 -0
  140. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libadd_rms_norm_impl.so +0 -0
  141. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libapply_rotary_pos_emb_impl.so +0 -0
  142. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libcast_impl.so +0 -0
  143. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libgelu_impl.so +0 -0
  144. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libmatmul_impl.so +0 -0
  145. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libmatmul_stridedslice_fusion_impl.so +0 -0
  146. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libms_kernels_internal.so +0 -0
  147. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libnot_equal_impl.so +0 -0
  148. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/libreshape_and_cache_impl.so +0 -0
  149. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/lib/librms_norm_impl.so +0 -0
  150. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_bf16_bnsd_full_mix.o +0 -0
  151. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_bf16_bnsd_tri_mix.o +0 -0
  152. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_bf16_bsh_full_mix.o +0 -0
  153. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_bf16_bsh_tri_mix.o +0 -0
  154. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_fp16_bnsd_full_mix.o +0 -0
  155. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_fp16_bnsd_tri_mix.o +0 -0
  156. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_fp16_bsh_full_mix.o +0 -0
  157. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/flash_attention_score_fp16_bsh_tri_mix.o +0 -0
  158. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/paged_attention_bf16_bnsd_full_mix.o +0 -0
  159. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/paged_attention_bf16_bsh_full_mix.o +0 -0
  160. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/paged_attention_fp16_bnsd_full_mix.o +0 -0
  161. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/paged_attention_fp16_bsh_full_mix.o +0 -0
  162. mindspore/lib/plugin/ascend/ms_kernels_internal/lccl/include/lcal.h +22 -0
  163. mindspore/lib/plugin/ascend/ms_kernels_internal/lccl/include/lcal_comm.h +70 -0
  164. mindspore/lib/plugin/ascend/ms_kernels_internal/lccl/include/lcal_types.h +103 -0
  165. mindspore/lib/plugin/ascend/ms_kernels_internal/lccl/include/lccl.h +47 -0
  166. mindspore/lib/plugin/ascend/ms_kernels_internal/lccl/include/lccl_wrapper.h +58 -0
  167. mindspore/lib/plugin/ascend/ms_kernels_internal/lccl/include/lcoc.h +154 -0
  168. mindspore/lib/plugin/ascend/ms_kernels_internal/lccl/lib/liblcal.so +0 -0
  169. mindspore/lib/plugin/ascend/ms_kernels_internal/lccl/lib/liblccl_wrapper.so +0 -0
  170. mindspore/lib/plugin/libmindspore_ascend.so.2 +0 -0
  171. mindspore/log.py +2 -2
  172. mindspore/mint/__init__.py +457 -0
  173. mindspore/mint/nn/__init__.py +430 -0
  174. mindspore/mint/nn/functional.py +424 -0
  175. mindspore/mint/optim/__init__.py +24 -0
  176. mindspore/mint/optim/adamw.py +186 -0
  177. mindspore/multiprocessing/__init__.py +4 -0
  178. mindspore/nn/__init__.py +3 -0
  179. mindspore/nn/cell.py +51 -47
  180. mindspore/nn/extend/__init__.py +29 -0
  181. mindspore/nn/extend/basic.py +140 -0
  182. mindspore/nn/extend/embedding.py +143 -0
  183. mindspore/nn/extend/layer/__init__.py +27 -0
  184. mindspore/nn/extend/layer/normalization.py +107 -0
  185. mindspore/nn/extend/pooling.py +117 -0
  186. mindspore/nn/generator.py +297 -0
  187. mindspore/nn/layer/basic.py +109 -1
  188. mindspore/nn/layer/container.py +2 -2
  189. mindspore/nn/layer/conv.py +6 -6
  190. mindspore/nn/layer/embedding.py +1 -1
  191. mindspore/nn/layer/normalization.py +21 -43
  192. mindspore/nn/layer/padding.py +4 -0
  193. mindspore/nn/optim/ada_grad.py +2 -2
  194. mindspore/nn/optim/adadelta.py +1 -1
  195. mindspore/nn/optim/adafactor.py +1 -1
  196. mindspore/nn/optim/adam.py +7 -7
  197. mindspore/nn/optim/adamax.py +2 -2
  198. mindspore/nn/optim/adasum.py +2 -2
  199. mindspore/nn/optim/asgd.py +2 -2
  200. mindspore/nn/optim/ftrl.py +1 -1
  201. mindspore/nn/optim/lamb.py +3 -3
  202. mindspore/nn/optim/lars.py +1 -1
  203. mindspore/nn/optim/lazyadam.py +2 -2
  204. mindspore/nn/optim/momentum.py +2 -2
  205. mindspore/nn/optim/optimizer.py +2 -2
  206. mindspore/nn/optim/proximal_ada_grad.py +2 -2
  207. mindspore/nn/optim/rmsprop.py +2 -2
  208. mindspore/nn/optim/rprop.py +2 -2
  209. mindspore/nn/optim/sgd.py +2 -2
  210. mindspore/nn/optim/thor.py +2 -2
  211. mindspore/nn/wrap/cell_wrapper.py +9 -9
  212. mindspore/nn/wrap/grad_reducer.py +5 -5
  213. mindspore/ops/_grad_experimental/grad_comm_ops.py +4 -2
  214. mindspore/ops/_vmap/vmap_grad_nn_ops.py +41 -2
  215. mindspore/ops/_vmap/vmap_math_ops.py +27 -8
  216. mindspore/ops/_vmap/vmap_nn_ops.py +66 -8
  217. mindspore/ops/auto_generate/cpp_create_prim_instance_helper.py +73 -1
  218. mindspore/ops/auto_generate/gen_arg_dtype_cast.py +12 -3
  219. mindspore/ops/auto_generate/gen_arg_handler.py +24 -0
  220. mindspore/ops/auto_generate/gen_extend_func.py +274 -0
  221. mindspore/ops/auto_generate/gen_ops_def.py +889 -22
  222. mindspore/ops/auto_generate/gen_ops_prim.py +3541 -253
  223. mindspore/ops/auto_generate/pyboost_inner_prim.py +282 -0
  224. mindspore/ops/composite/multitype_ops/_compile_utils.py +2 -1
  225. mindspore/ops/composite/multitype_ops/_constexpr_utils.py +9 -0
  226. mindspore/ops/extend/__init__.py +9 -1
  227. mindspore/ops/extend/array_func.py +134 -27
  228. mindspore/ops/extend/math_func.py +3 -3
  229. mindspore/ops/extend/nn_func.py +363 -2
  230. mindspore/ops/function/__init__.py +19 -2
  231. mindspore/ops/function/array_func.py +463 -439
  232. mindspore/ops/function/clip_func.py +7 -18
  233. mindspore/ops/function/grad/grad_func.py +5 -5
  234. mindspore/ops/function/linalg_func.py +4 -4
  235. mindspore/ops/function/math_func.py +260 -243
  236. mindspore/ops/function/nn_func.py +825 -62
  237. mindspore/ops/function/random_func.py +73 -4
  238. mindspore/ops/function/sparse_unary_func.py +1 -1
  239. mindspore/ops/function/vmap_func.py +1 -1
  240. mindspore/ops/functional.py +2 -2
  241. mindspore/ops/op_info_register.py +1 -31
  242. mindspore/ops/operations/__init__.py +2 -3
  243. mindspore/ops/operations/_grad_ops.py +2 -107
  244. mindspore/ops/operations/_inner_ops.py +5 -5
  245. mindspore/ops/operations/_sequence_ops.py +2 -2
  246. mindspore/ops/operations/array_ops.py +11 -233
  247. mindspore/ops/operations/comm_ops.py +32 -32
  248. mindspore/ops/operations/custom_ops.py +7 -89
  249. mindspore/ops/operations/manually_defined/ops_def.py +329 -4
  250. mindspore/ops/operations/math_ops.py +13 -163
  251. mindspore/ops/operations/nn_ops.py +9 -316
  252. mindspore/ops/operations/random_ops.py +1 -1
  253. mindspore/ops/operations/sparse_ops.py +3 -3
  254. mindspore/ops/primitive.py +2 -2
  255. mindspore/ops_generate/arg_dtype_cast.py +12 -3
  256. mindspore/ops_generate/arg_handler.py +24 -0
  257. mindspore/ops_generate/gen_ops_inner_prim.py +2 -0
  258. mindspore/ops_generate/gen_pyboost_func.py +13 -6
  259. mindspore/ops_generate/pyboost_utils.py +2 -17
  260. mindspore/parallel/__init__.py +3 -2
  261. mindspore/parallel/_auto_parallel_context.py +106 -1
  262. mindspore/parallel/_parallel_serialization.py +34 -2
  263. mindspore/parallel/_utils.py +16 -0
  264. mindspore/parallel/algo_parameter_config.py +4 -4
  265. mindspore/parallel/checkpoint_transform.py +249 -77
  266. mindspore/parallel/cluster/process_entity/_api.py +1 -1
  267. mindspore/parallel/parameter_broadcast.py +1 -1
  268. mindspore/parallel/shard.py +1 -1
  269. mindspore/profiler/parser/ascend_analysis/fwk_cann_parser.py +1 -0
  270. mindspore/profiler/parser/ascend_analysis/profiler_info_parser.py +17 -5
  271. mindspore/profiler/parser/ascend_msprof_exporter.py +3 -3
  272. mindspore/profiler/parser/ascend_msprof_generator.py +10 -3
  273. mindspore/profiler/parser/ascend_op_generator.py +26 -9
  274. mindspore/profiler/parser/ascend_timeline_generator.py +7 -4
  275. mindspore/profiler/parser/profiler_info.py +11 -1
  276. mindspore/profiler/profiling.py +13 -5
  277. mindspore/rewrite/api/node.py +12 -12
  278. mindspore/rewrite/api/symbol_tree.py +11 -11
  279. mindspore/run_check/_check_version.py +1 -1
  280. mindspore/safeguard/rewrite_obfuscation.py +2 -2
  281. mindspore/train/amp.py +4 -4
  282. mindspore/train/anf_ir_pb2.py +8 -2
  283. mindspore/train/callback/_backup_and_restore.py +2 -2
  284. mindspore/train/callback/_callback.py +4 -4
  285. mindspore/train/callback/_checkpoint.py +2 -2
  286. mindspore/train/callback/_early_stop.py +2 -2
  287. mindspore/train/callback/_landscape.py +4 -4
  288. mindspore/train/callback/_loss_monitor.py +2 -2
  289. mindspore/train/callback/_on_request_exit.py +2 -2
  290. mindspore/train/callback/_reduce_lr_on_plateau.py +2 -2
  291. mindspore/train/callback/_summary_collector.py +2 -2
  292. mindspore/train/callback/_time_monitor.py +2 -2
  293. mindspore/train/dataset_helper.py +8 -3
  294. mindspore/train/loss_scale_manager.py +2 -2
  295. mindspore/train/metrics/metric.py +3 -3
  296. mindspore/train/mind_ir_pb2.py +22 -17
  297. mindspore/train/model.py +15 -15
  298. mindspore/train/serialization.py +18 -18
  299. mindspore/train/summary/summary_record.py +7 -7
  300. mindspore/train/train_thor/convert_utils.py +3 -3
  301. mindspore/version.py +1 -1
  302. {mindspore-2.3.0rc1.dist-info → mindspore-2.3.0rc2.dist-info}/METADATA +1 -1
  303. {mindspore-2.3.0rc1.dist-info → mindspore-2.3.0rc2.dist-info}/RECORD +307 -260
  304. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/matmul_stridedslice/tiling_data.h +0 -59
  305. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/FlashAttentionScore_bf16_BNSD_mix.o +0 -0
  306. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/FlashAttentionScore_bf16_BSH_mix.o +0 -0
  307. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/FlashAttentionScore_fp16_BNSD_mix.o +0 -0
  308. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/FlashAttentionScore_fp16_BSH_mix.o +0 -0
  309. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/PagedAttention_bf16_BNSD_mix.o +0 -0
  310. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/PagedAttention_bf16_BSH_mix.o +0 -0
  311. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/PagedAttention_fp16_BNSD_mix.o +0 -0
  312. mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/op_kernels/ascend910b/BSAttention/PagedAttention_fp16_BSH_mix.o +0 -0
  313. /mindspore/lib/plugin/ascend/ms_kernels_internal/internal_kernel/include/{attention/bs_attention_mix_hwsync.h → flash_attention_score/kernel/flash_attention_score_mix_hwsync.h} +0 -0
  314. {mindspore-2.3.0rc1.dist-info → mindspore-2.3.0rc2.dist-info}/WHEEL +0 -0
  315. {mindspore-2.3.0rc1.dist-info → mindspore-2.3.0rc2.dist-info}/entry_points.txt +0 -0
  316. {mindspore-2.3.0rc1.dist-info → mindspore-2.3.0rc2.dist-info}/top_level.txt +0 -0
mindspore/dataset/vision/utils.py CHANGED
@@ -262,7 +262,7 @@ class ConvertMode(IntEnum):
 
     mode = c_values.get(mode)
     if mode is None:
-        raise RuntimeError("Unsupported ConvertMode, see https://www.mindspore.cn/docs/zh-CN/r2.3.q1/api_python/"
+        raise RuntimeError("Unsupported ConvertMode, see https://www.mindspore.cn/docs/zh-CN/master/api_python/"
                            "dataset_vision/mindspore.dataset.vision.ConvertColor.html for more details.")
     return mode
 
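For orientation, this mapping backs `mindspore.dataset.vision.ConvertColor`, which raises the RuntimeError above when handed an unmapped mode. A minimal sketch of the normal call path (eager-mode transform on a NumPy image; a standard 2.3 install is assumed):

```python
import numpy as np
import mindspore.dataset.vision as vision

# Supported ConvertMode members resolve through the c_values map; a mode
# missing from the map triggers the RuntimeError shown in the hunk above.
img = np.random.randint(0, 255, size=(32, 32, 3), dtype=np.uint8)
to_rgb = vision.ConvertColor(vision.ConvertMode.COLOR_BGR2RGB)
print(to_rgb(img).shape)  # (32, 32, 3)
```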
mindspore/experimental/optim/adadelta.py CHANGED
@@ -63,7 +63,7 @@ class Adadelta(Optimizer):
     .. warning::
         This is an experimental optimizer API that is subject to change.
         This module must be used with lr scheduler module in `LRScheduler Class
-        <https://www.mindspore.cn/docs/en/r2.3.q1/api_python/mindspore.experimental.html#lrscheduler-class>`_ .
+        <https://www.mindspore.cn/docs/en/master/api_python/mindspore.experimental.html#lrscheduler-class>`_ .
 
     Args:
         params (Union[list(Parameter), list(dict)]): list of parameters to optimize or dicts defining
@@ -97,7 +97,7 @@ class Adadelta(Optimizer):
         >>> from mindspore import nn
         >>> from mindspore.experimental import optim
         >>> # Define the network structure of LeNet5. Refer to
-        >>> # https://gitee.com/mindspore/docs/blob/r2.3.q1/docs/mindspore/code/lenet.py
+        >>> # https://gitee.com/mindspore/docs/blob/master/docs/mindspore/code/lenet.py
         >>> net = LeNet5()
         >>> loss_fn = nn.SoftmaxCrossEntropyWithLogits(sparse=True)
         >>> optimizer = optim.Adadelta(net.trainable_params(), lr=0.1)
mindspore/experimental/optim/adagrad.py CHANGED
@@ -60,7 +60,7 @@ class Adagrad(Optimizer):
     .. warning::
         This is an experimental optimizer API that is subject to change.
         This module must be used with lr scheduler module in `LRScheduler Class
-        <https://www.mindspore.cn/docs/en/r2.3.q1/api_python/mindspore.experimental.html#lrscheduler-class>`_ .
+        <https://www.mindspore.cn/docs/en/master/api_python/mindspore.experimental.html#lrscheduler-class>`_ .
 
     Args:
         params (Union[list(Parameter), list(dict)]): list of parameters to optimize or dicts defining
@@ -95,7 +95,7 @@ class Adagrad(Optimizer):
         >>> from mindspore import nn
         >>> from mindspore.experimental import optim
         >>> # Define the network structure of LeNet5. Refer to
-        >>> # https://gitee.com/mindspore/docs/blob/r2.3.q1/docs/mindspore/code/lenet.py
+        >>> # https://gitee.com/mindspore/docs/blob/master/docs/mindspore/code/lenet.py
         >>> net = LeNet5()
         >>> loss_fn = nn.SoftmaxCrossEntropyWithLogits(sparse=True)
         >>> optimizer = optim.Adagrad(net.trainable_params(), lr=0.1)
mindspore/experimental/optim/adam.py CHANGED
@@ -80,7 +80,7 @@ class Adam(Optimizer):
     .. warning::
         This is an experimental optimizer API that is subject to change.
         This module must be used with lr scheduler module in `LRScheduler Class
-        <https://www.mindspore.cn/docs/en/r2.3.q1/api_python/mindspore.nn.html#learningrateschedule-class>`_ .
+        <https://www.mindspore.cn/docs/en/master/api_python/mindspore.nn.html#learningrateschedule-class>`_ .
 
     Args:
         params (Union[list(Parameter), list(dict)]): list of parameters to optimize or dicts defining
@@ -115,7 +115,7 @@ class Adam(Optimizer):
         >>> from mindspore import nn
         >>> from mindspore.experimental import optim
         >>> # Define the network structure of LeNet5. Refer to
-        >>> # https://gitee.com/mindspore/docs/blob/r2.3.q1/docs/mindspore/code/lenet.py
+        >>> # https://gitee.com/mindspore/docs/blob/master/docs/mindspore/code/lenet.py
        >>> net = LeNet5()
         >>> loss_fn = nn.SoftmaxCrossEntropyWithLogits(sparse=True)
         >>> optimizer = optim.Adam(net.trainable_params(), lr=0.1)
mindspore/experimental/optim/adamax.py CHANGED
@@ -66,7 +66,7 @@ class Adamax(Optimizer):
     .. warning::
         This is an experimental optimizer API that is subject to change.
         This module must be used with lr scheduler module in `LRScheduler Class
-        <https://www.mindspore.cn/docs/en/r2.3.q1/api_python/mindspore.experimental.html#lrscheduler-class>`_ .
+        <https://www.mindspore.cn/docs/en/master/api_python/mindspore.experimental.html#lrscheduler-class>`_ .
 
     Args:
         params (Union[list(Parameter), list(dict)]): list of parameters to optimize or dicts defining
@@ -100,7 +100,7 @@ class Adamax(Optimizer):
         >>> from mindspore import nn
         >>> from mindspore.experimental import optim
         >>> # Define the network structure of LeNet5. Refer to
-        >>> # https://gitee.com/mindspore/docs/blob/r2.3.q1/docs/mindspore/code/lenet.py
+        >>> # https://gitee.com/mindspore/docs/blob/master/docs/mindspore/code/lenet.py
         >>> net = LeNet5()
         >>> loss_fn = nn.SoftmaxCrossEntropyWithLogits(sparse=True)
         >>> optimizer = optim.Adamax(net.trainable_params(), lr=0.1)
mindspore/experimental/optim/adamw.py CHANGED
@@ -101,7 +101,7 @@ class AdamW(Optimizer):
     .. warning::
         This is an experimental optimizer API that is subject to change.
         This module must be used with lr scheduler module in `LRScheduler Class
-        <https://www.mindspore.cn/docs/en/r2.3.q1/api_python/mindspore.experimental.html#lrscheduler-class>`_ .
+        <https://www.mindspore.cn/docs/en/master/api_python/mindspore.experimental.html#lrscheduler-class>`_ .
 
     Args:
         params (Union[list(Parameter), list(dict)]): list of parameters to optimize or dicts defining
@@ -136,7 +136,7 @@ class AdamW(Optimizer):
         >>> from mindspore import nn
         >>> from mindspore.experimental import optim
         >>> # Define the network structure of LeNet5. Refer to
-        >>> # https://gitee.com/mindspore/docs/blob/r2.3.q1/docs/mindspore/code/lenet.py
+        >>> # https://gitee.com/mindspore/docs/blob/master/docs/mindspore/code/lenet.py
         >>> net = LeNet5()
         >>> loss_fn = nn.SoftmaxCrossEntropyWithLogits(sparse=True)
         >>> optimizer = optim.AdamW(net.trainable_params(), lr=0.1)
mindspore/experimental/optim/asgd.py CHANGED
@@ -56,7 +56,7 @@ class ASGD(Optimizer):
     .. warning::
         This is an experimental optimizer API that is subject to change.
         This module must be used with lr scheduler module in `LRScheduler Class
-        <https://www.mindspore.cn/docs/en/r2.3.q1/api_python/mindspore.experimental.html#lrscheduler-class>`_ .
+        <https://www.mindspore.cn/docs/en/master/api_python/mindspore.experimental.html#lrscheduler-class>`_ .
 
     Args:
         params (Union[list(Parameter), list(dict)]): list of parameters to optimize or dicts defining
@@ -85,7 +85,7 @@ class ASGD(Optimizer):
         >>> from mindspore import nn
         >>> from mindspore.experimental import optim
         >>> # Define the network structure of LeNet5. Refer to
-        >>> # https://gitee.com/mindspore/docs/blob/r2.3.q1/docs/mindspore/code/lenet.py
+        >>> # https://gitee.com/mindspore/docs/blob/master/docs/mindspore/code/lenet.py
         >>> net = LeNet5()
         >>> loss_fn = nn.SoftmaxCrossEntropyWithLogits(sparse=True)
         >>> optimizer = optim.ASGD(net.trainable_params(), lr=0.1)
mindspore/experimental/optim/lr_scheduler.py CHANGED
@@ -38,7 +38,7 @@ class LRScheduler:
     .. warning::
         This is an experimental lr scheduler module that is subject to change.
         This module must be used with optimizers in `Experimental Optimizer
-        <https://www.mindspore.cn/docs/en/r2.3.q1/api_python/mindspore.experimental.html#experimental-optimizer>`_ .
+        <https://www.mindspore.cn/docs/en/master/api_python/mindspore.experimental.html#experimental-optimizer>`_ .
 
     Args:
         optimizer (:class:`mindspore.experimental.optim.Optimizer`): The optimizer instance.
@@ -149,7 +149,7 @@ class StepLR(LRScheduler):
     .. warning::
         This is an experimental lr scheduler module that is subject to change.
         This module must be used with optimizers in `Experimental Optimizer
-        <https://www.mindspore.cn/docs/en/r2.3.q1/api_python/mindspore.experimental.html#experimental-optimizer>`_ .
+        <https://www.mindspore.cn/docs/en/master/api_python/mindspore.experimental.html#experimental-optimizer>`_ .
 
     Args:
         optimizer (:class:`mindspore.experimental.optim.Optimizer`): Wrapped optimizer.
@@ -166,7 +166,7 @@ class StepLR(LRScheduler):
         >>> from mindspore import nn
         >>> from mindspore.experimental import optim
         >>> # Define the network structure of LeNet5. Refer to
-        >>> # https://gitee.com/mindspore/docs/blob/r2.3.q1/docs/mindspore/code/lenet.py
+        >>> # https://gitee.com/mindspore/docs/blob/master/docs/mindspore/code/lenet.py
         >>> net = LeNet5()
         >>> loss_fn = nn.SoftmaxCrossEntropyWithLogits(sparse=True)
         >>> optimizer = optim.Adam(net.trainable_params(), lr=0.05)
@@ -186,7 +186,7 @@ class StepLR(LRScheduler):
         ...     return loss
         >>> for epoch in range(6):
         ...     # Create the dataset taking MNIST as an example. Refer to
-        ...     # https://gitee.com/mindspore/docs/blob/r2.3.q1/docs/mindspore/code/mnist.py
+        ...     # https://gitee.com/mindspore/docs/blob/master/docs/mindspore/code/mnist.py
         ...     for data, label in create_dataset():
         ...         train_step(data, label)
         ...     scheduler.step()
@@ -221,7 +221,7 @@ class LinearLR(LRScheduler):
     .. warning::
         This is an experimental lr scheduler module that is subject to change.
         This module must be used with optimizers in `Experimental Optimizer
-        <https://www.mindspore.cn/docs/en/r2.3.q1/api_python/mindspore.experimental.html#experimental-optimizer>`_ .
+        <https://www.mindspore.cn/docs/en/master/api_python/mindspore.experimental.html#experimental-optimizer>`_ .
 
     Args:
         optimizer (:class:`mindspore.experimental.optim.Optimizer`): Wrapped optimizer.
@@ -246,7 +246,7 @@ class LinearLR(LRScheduler):
         >>> from mindspore import nn
         >>> from mindspore.experimental import optim
         >>> # Define the network structure of LeNet5. Refer to
-        >>> # https://gitee.com/mindspore/docs/blob/r2.3.q1/docs/mindspore/code/lenet.py
+        >>> # https://gitee.com/mindspore/docs/blob/master/docs/mindspore/code/lenet.py
         >>> net = LeNet5()
         >>> loss_fn = nn.SoftmaxCrossEntropyWithLogits(sparse=True)
         >>> optimizer = optim.Adam(net.trainable_params(), lr=0.05)
@@ -268,7 +268,7 @@ class LinearLR(LRScheduler):
         ...     return loss
         >>> for epoch in range(5):
         ...     # Create the dataset taking MNIST as an example. Refer to
-        ...     # https://gitee.com/mindspore/docs/blob/r2.3.q1/docs/mindspore/code/mnist.py
+        ...     # https://gitee.com/mindspore/docs/blob/master/docs/mindspore/code/mnist.py
         ...     for data, label in create_dataset():
         ...         train_step(data, label)
         ...     scheduler.step()
@@ -282,7 +282,8 @@ class LinearLR(LRScheduler):
 
         if end_factor > 1.0 or end_factor < 0:
             raise ValueError('Ending multiplicative factor expected to be between 0 and 1.')
-
+        if not isinstance(total_iters, int):
+            raise TypeError(f"For 'LinearLR', the type of total_iters must be int, but got {type(total_iters)}.")
         self.start_factor = start_factor
         self.end_factor = end_factor
         self.total_iters = total_iters
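The hunk above adds an explicit type gate on `total_iters`. A minimal sketch of the new behavior, with any small network standing in for the LeNet5 used in the doctests above:

```python
from mindspore import nn
from mindspore.experimental import optim

net = nn.Dense(4, 2)  # stand-in for LeNet5
optimizer = optim.SGD(net.trainable_params(), lr=0.1)

# int total_iters: accepted, as before.
scheduler = optim.lr_scheduler.LinearLR(optimizer, start_factor=0.5, total_iters=4)

# float total_iters: now rejected up front instead of misbehaving later in step().
try:
    optim.lr_scheduler.LinearLR(optimizer, start_factor=0.5, total_iters=4.0)
except TypeError as err:
    print(err)  # For 'LinearLR', the type of total_iters must be int, ...
```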
@@ -316,7 +317,7 @@ class ExponentialLR(LRScheduler):
     .. warning::
         This is an experimental lr scheduler module that is subject to change.
         This module must be used with optimizers in `Experimental Optimizer
-        <https://www.mindspore.cn/docs/en/r2.3.q1/api_python/mindspore.experimental.html#experimental-optimizer>`_ .
+        <https://www.mindspore.cn/docs/en/master/api_python/mindspore.experimental.html#experimental-optimizer>`_ .
 
     Args:
         optimizer (:class:`mindspore.experimental.optim.Optimizer`): Wrapped optimizer.
@@ -383,7 +384,7 @@ class PolynomialLR(LRScheduler):
     .. warning::
         This is an experimental lr scheduler module that is subject to change.
         This module must be used with optimizers in `Experimental Optimizer
-        <https://www.mindspore.cn/docs/en/r2.3.q1/api_python/mindspore.experimental.html#experimental-optimizer>`_ .
+        <https://www.mindspore.cn/docs/en/master/api_python/mindspore.experimental.html#experimental-optimizer>`_ .
 
     Args:
         optimizer (:class:`mindspore.experimental.optim.Optimizer`): Wrapped optimizer.
@@ -423,6 +424,8 @@ class PolynomialLR(LRScheduler):
             raise TypeError(f"For 'PolynomialLR', the 'power' must be float, but got {type(power)}.")
         if power < 0:
             raise ValueError(f"For 'PolynomialLR', the 'power' must be >= 0, but got {power}.")
+        if not isinstance(total_iters, int):
+            raise TypeError(f"For 'PolynomialLR', the type of total_iters must be int, but got {type(total_iters)}.")
         self.total_iters = total_iters
         self.power = power
         self.min = P.Minimum()
@@ -450,7 +453,7 @@ class LambdaLR(LRScheduler):
     .. warning::
         This is an experimental lr scheduler module that is subject to change.
         This module must be used with optimizers in `Experimental Optimizer
-        <https://www.mindspore.cn/docs/en/r2.3.q1/api_python/mindspore.experimental.html#experimental-optimizer>`_ .
+        <https://www.mindspore.cn/docs/en/master/api_python/mindspore.experimental.html#experimental-optimizer>`_ .
 
     Args:
         optimizer (:class:`mindspore.experimental.optim.Optimizer`): Wrapped optimizer.
@@ -503,7 +506,7 @@ class MultiplicativeLR(LRScheduler):
     .. warning::
         This is an experimental lr scheduler module that is subject to change.
         This module must be used with optimizers in `Experimental Optimizer
-        <https://www.mindspore.cn/docs/en/r2.3.q1/api_python/mindspore.experimental.html#experimental-optimizer>`_ .
+        <https://www.mindspore.cn/docs/en/master/api_python/mindspore.experimental.html#experimental-optimizer>`_ .
 
     Args:
         optimizer (:class:`mindspore.experimental.optim.Optimizer`): Wrapped optimizer.
@@ -557,7 +560,7 @@ class MultiStepLR(LRScheduler):
     .. warning::
         This is an experimental lr scheduler module that is subject to change.
         This module must be used with optimizers in `Experimental Optimizer
-        <https://www.mindspore.cn/docs/en/r2.3.q1/api_python/mindspore.experimental.html#experimental-optimizer>`_ .
+        <https://www.mindspore.cn/docs/en/master/api_python/mindspore.experimental.html#experimental-optimizer>`_ .
 
     Args:
         optimizer (:class:`mindspore.experimental.optim.Optimizer`): Wrapped optimizer.
@@ -633,7 +636,7 @@ class ConstantLR(LRScheduler):
     .. warning::
         This is an experimental lr scheduler module that is subject to change.
         This module must be used with optimizers in `Experimental Optimizer
-        <https://www.mindspore.cn/docs/en/r2.3.q1/api_python/mindspore.experimental.html#experimental-optimizer>`_ .
+        <https://www.mindspore.cn/docs/en/master/api_python/mindspore.experimental.html#experimental-optimizer>`_ .
 
     Args:
         optimizer (:class:`mindspore.experimental.optim.Optimizer`): Wrapped optimizer.
@@ -668,7 +671,8 @@ class ConstantLR(LRScheduler):
     def __init__(self, optimizer, factor=1.0 / 3, total_iters=5, last_epoch=-1):
         if factor > 1.0 or factor < 0:
             raise ValueError('Constant multiplicative factor expected to be between 0 and 1.')
-
+        if not isinstance(total_iters, int):
+            raise TypeError(f"For 'ConstantLR', the type of total_iters must be int, but got {type(total_iters)}.")
         self.factor = factor
         self.total_iters = total_iters
         super(ConstantLR, self).__init__(optimizer, last_epoch)
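PolynomialLR and ConstantLR gain the same `total_iters` gate as LinearLR above. A hedged distillation of the shared check (the helper below is illustrative only, not MindSpore API):

```python
def check_total_iters(total_iters, cls_name):
    # Illustrative restatement of the validation added to LinearLR,
    # PolynomialLR, and ConstantLR in this release.
    if not isinstance(total_iters, int):
        raise TypeError(f"For '{cls_name}', the type of total_iters must be int, "
                        f"but got {type(total_iters)}.")

check_total_iters(5, "ConstantLR")         # passes silently
try:
    check_total_iters(2.5, "PolynomialLR")  # raises TypeError
except TypeError as err:
    print(err)
```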
@@ -695,7 +699,7 @@ class SequentialLR:
     .. warning::
         This is an experimental lr scheduler module that is subject to change.
         This module must be used with optimizers in `Experimental Optimizer
-        <https://www.mindspore.cn/docs/en/r2.3.q1/api_python/mindspore.experimental.html#experimental-optimizer>`_ .
+        <https://www.mindspore.cn/docs/en/master/api_python/mindspore.experimental.html#experimental-optimizer>`_ .
 
     Args:
         optimizer (:class:`mindspore.experimental.optim.Optimizer`): Wrapped optimizer.
@@ -799,7 +803,7 @@ class ReduceLROnPlateau:
     .. warning::
         This is an experimental lr scheduler module that is subject to change.
         This module must be used with optimizers in `Experimental Optimizer
-        <https://www.mindspore.cn/docs/en/r2.3.q1/api_python/mindspore.experimental.html#experimental-optimizer>`_ .
+        <https://www.mindspore.cn/docs/en/master/api_python/mindspore.experimental.html#experimental-optimizer>`_ .
 
     Args:
         optimizer (:class:`mindspore.experimental.optim.Optimizer`): Wrapped optimizer.
@@ -994,7 +998,7 @@ class CyclicLR(LRScheduler):
     .. warning::
         This is an experimental lr scheduler module that is subject to change.
         This module must be used with optimizers in `Experimental Optimizer
-        <https://www.mindspore.cn/docs/en/r2.3.q1/api_python/mindspore.experimental.html#experimental-optimizer>`_ .
+        <https://www.mindspore.cn/docs/en/master/api_python/mindspore.experimental.html#experimental-optimizer>`_ .
 
     Args:
         optimizer (:class:`mindspore.experimental.optim.Optimizer`): Wrapped optimizer.
@@ -1171,7 +1175,7 @@ class CosineAnnealingWarmRestarts(LRScheduler):
     .. warning::
         This is an experimental lr scheduler module that is subject to change.
         This module must be used with optimizers in `Experimental Optimizer
-        <https://www.mindspore.cn/docs/en/r2.3.q1/api_python/mindspore.experimental.html#experimental-optimizer>`_ .
+        <https://www.mindspore.cn/docs/en/master/api_python/mindspore.experimental.html#experimental-optimizer>`_ .
 
     Args:
         optimizer (:class:`mindspore.experimental.optim.Optimizer`): Wrapped optimizer.
@@ -1303,7 +1307,7 @@ class CosineAnnealingLR(LRScheduler):
     .. warning::
         This is an experimental lr scheduler module that is subject to change.
         This module must be used with optimizers in `Experimental Optimizer
-        <https://www.mindspore.cn/docs/en/r2.3.q1/api_python/mindspore.experimental.html#experimental-optimizer>`_ .
+        <https://www.mindspore.cn/docs/en/master/api_python/mindspore.experimental.html#experimental-optimizer>`_ .
 
     Args:
         optimizer (:class:`mindspore.experimental.optim.Optimizer`): Wrapped optimizer.
mindspore/experimental/optim/nadam.py CHANGED
@@ -57,7 +57,7 @@ class NAdam(Optimizer):
     .. warning::
         This is an experimental optimizer API that is subject to change.
         This module must be used with lr scheduler module in `LRScheduler Class
-        <https://www.mindspore.cn/docs/en/r2.3.q1/api_python/mindspore.experimental.html#lrscheduler-class>`_ .
+        <https://www.mindspore.cn/docs/en/master/api_python/mindspore.experimental.html#lrscheduler-class>`_ .
 
     Args:
         params (Union[list(Parameter), list(dict)]): list of parameters to optimize or dicts defining
@@ -89,7 +89,7 @@ class NAdam(Optimizer):
         >>> from mindspore import nn
         >>> from mindspore.experimental import optim
         >>> # Define the network structure of LeNet5. Refer to
-        >>> # https://gitee.com/mindspore/docs/blob/r2.3.q1/docs/mindspore/code/lenet.py
+        >>> # https://gitee.com/mindspore/docs/blob/master/docs/mindspore/code/lenet.py
         >>> net = LeNet5()
         >>> loss_fn = nn.SoftmaxCrossEntropyWithLogits(sparse=True)
         >>> optimizer = optim.NAdam(net.trainable_params(), lr=0.1)
mindspore/experimental/optim/optimizer.py CHANGED
@@ -36,7 +36,7 @@ class Optimizer(Cell):
     .. warning::
         This is an experimental optimizer API that is subject to change.
         This module must be used with lr scheduler module in `LRScheduler Class
-        <https://www.mindspore.cn/docs/en/r2.3.q1/api_python/mindspore.experimental.html#lrscheduler-class>`_ .
+        <https://www.mindspore.cn/docs/en/master/api_python/mindspore.experimental.html#lrscheduler-class>`_ .
 
     Args:
         params (Union[list(Parameter), list(dict)]): an iterable of :class:`mindspore.Parameter` or
mindspore/experimental/optim/radam.py CHANGED
@@ -89,7 +89,7 @@ class RAdam(Optimizer):
     .. warning::
         This is an experimental optimizer API that is subject to change.
         This module must be used with lr scheduler module in `LRScheduler Class
-        <https://www.mindspore.cn/docs/en/r2.3.q1/api_python/mindspore.experimental.html#lrscheduler-class>`_ .
+        <https://www.mindspore.cn/docs/en/master/api_python/mindspore.experimental.html#lrscheduler-class>`_ .
 
     Args:
         params (Union[list(Parameter), list(dict)]): list of parameters to optimize or dicts defining
@@ -119,7 +119,7 @@ class RAdam(Optimizer):
         >>> from mindspore import nn
         >>> from mindspore.experimental import optim
         >>> # Define the network structure of LeNet5. Refer to
-        >>> # https://gitee.com/mindspore/docs/blob/r2.3.q1/docs/mindspore/code/lenet.py
+        >>> # https://gitee.com/mindspore/docs/blob/master/docs/mindspore/code/lenet.py
         >>> net = LeNet5()
         >>> loss_fn = nn.SoftmaxCrossEntropyWithLogits(sparse=True)
         >>> optimizer = optim.RAdam(net.trainable_params(), lr=0.1)
mindspore/experimental/optim/rmsprop.py CHANGED
@@ -53,7 +53,7 @@ class RMSprop(Optimizer):
     .. warning::
         This is an experimental optimizer API that is subject to change.
         This module must be used with lr scheduler module in `LRScheduler Class
-        <https://www.mindspore.cn/docs/en/r2.3.q1/api_python/mindspore.experimental.html#lrscheduler-class>`_ .
+        <https://www.mindspore.cn/docs/en/master/api_python/mindspore.experimental.html#lrscheduler-class>`_ .
 
     Args:
         params (Union[list(Parameter), list(dict)]): list of parameters to optimize or dicts defining
@@ -87,7 +87,7 @@ class RMSprop(Optimizer):
         >>> from mindspore import nn
         >>> from mindspore.experimental import optim
         >>> # Define the network structure of LeNet5. Refer to
-        >>> # https://gitee.com/mindspore/docs/blob/r2.3.q1/docs/mindspore/code/lenet.py
+        >>> # https://gitee.com/mindspore/docs/blob/master/docs/mindspore/code/lenet.py
         >>> net = LeNet5()
         >>> loss_fn = nn.SoftmaxCrossEntropyWithLogits(sparse=True)
         >>> optimizer = optim.RMSprop(net.trainable_params(), lr=0.1)
mindspore/experimental/optim/rprop.py CHANGED
@@ -68,7 +68,7 @@ class Rprop(Optimizer):
     .. warning::
         This is an experimental optimizer API that is subject to change.
         This module must be used with lr scheduler module in `LRScheduler Class
-        <https://www.mindspore.cn/docs/en/r2.3.q1/api_python/mindspore.experimental.html#lrscheduler-class>`_ .
+        <https://www.mindspore.cn/docs/en/master/api_python/mindspore.experimental.html#lrscheduler-class>`_ .
 
     Args:
         params (Union[list(Parameter), list(dict)]): list of parameters to optimize or dicts defining
@@ -100,7 +100,7 @@ class Rprop(Optimizer):
         >>> from mindspore import nn
         >>> from mindspore.experimental import optim
         >>> # Define the network structure of LeNet5. Refer to
-        >>> # https://gitee.com/mindspore/docs/blob/r2.3.q1/docs/mindspore/code/lenet.py
+        >>> # https://gitee.com/mindspore/docs/blob/master/docs/mindspore/code/lenet.py
         >>> net = LeNet5()
         >>> loss_fn = nn.SoftmaxCrossEntropyWithLogits(sparse=True)
         >>> optimizer = optim.Rprop(net.trainable_params(), lr=0.1)
mindspore/experimental/optim/sgd.py CHANGED
@@ -56,7 +56,7 @@ class SGD(Optimizer):
     .. warning::
         This is an experimental optimizer API that is subject to change.
         This module must be used with lr scheduler module in `LRScheduler Class
-        <https://www.mindspore.cn/docs/en/r2.3.q1/api_python/mindspore.experimental.html#lrscheduler-class>`_ .
+        <https://www.mindspore.cn/docs/en/master/api_python/mindspore.experimental.html#lrscheduler-class>`_ .
 
     Args:
         params (Union[list(Parameter), list(dict)]): list of parameters to optimize or dicts defining
@@ -90,7 +90,7 @@ class SGD(Optimizer):
         >>> from mindspore import nn
         >>> from mindspore.experimental import optim
         >>> # Define the network structure of LeNet5. Refer to
-        >>> # https://gitee.com/mindspore/docs/blob/r2.3.q1/docs/mindspore/code/lenet.py
+        >>> # https://gitee.com/mindspore/docs/blob/master/docs/mindspore/code/lenet.py
         >>> net = LeNet5()
         >>> loss_fn = nn.SoftmaxCrossEntropyWithLogits(sparse=True)
         >>> optimizer = optim.SGD(net.trainable_params(), lr=0.1)
mindspore/hal/stream.py CHANGED
@@ -29,6 +29,8 @@ class Stream(Stream_):
     A device stream is a linear sequence of execution that belongs to a specific device,
     independent from other streams.
 
+    For a quick start of using Stream, please refer to `Illustration of stream management <https://www.mindspore.cn/docs/en/master/api_python/samples/hal/stream_manager.html>`_ .
+
     Args:
         priority (int, optional): priority of the stream, lower numbers represent higher priorities.
             By default, streams have priority ``0``.
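The added docstring line links to the stream-management sample. A minimal usage sketch of `mindspore.hal.Stream`, assuming a GPU or Ascend backend and the `mindspore.hal` API names as documented for 2.3:

```python
import numpy as np
import mindspore as ms
from mindspore import Tensor, ops

ms.set_context(device_target="GPU")   # streams require a device backend

stream = ms.hal.Stream()              # user-created stream
a = Tensor(np.ones((1024, 1024), np.float32))
with ms.hal.StreamCtx(stream):        # ops issued in this block are queued on `stream`
    b = ops.matmul(a, a)
stream.synchronize()                  # block until work queued on `stream` finishes
print(b.shape)
```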
mindspore/include/mindapi/base/types.h CHANGED
@@ -71,6 +71,11 @@ enum ActivationType : int64_t {
   GELU = 19,
   GLU = 20,
   UNKNOWN = 21,
+  FASTGELU = 22,
+  SILU = 23,
+  GEGLU = 24,
+  SWIGLU = 25,
+  REGLU = 26,
 };
 
 enum ReduceMode : int64_t {
mindspore/lib/plugin/ascend/custom_aicpu_ops/op_impl/cpu/config/cust_aicpu_kernel.json CHANGED
@@ -7644,11 +7644,11 @@
   "CustSliceGrad":{
     "input0":{
       "name":"dy",
-      "type":"DT_BOOL,DT_COMPLEX128,DT_COMPLEX64,DT_DOUBLE,DT_FLOAT,DT_FLOAT16,DT_INT16,DT_INT32,DT_INT64,DT_INT8,DT_UINT16,DT_UINT32,DT_UINT64,DT_UINT8"
+      "type":"DT_BOOL,DT_COMPLEX128,DT_COMPLEX64,DT_DOUBLE,DT_FLOAT,DT_FLOAT16,DT_INT16,DT_INT32,DT_INT64,DT_INT8,DT_UINT16,DT_UINT32,DT_UINT64,DT_UINT8,DT_BF16"
     },
     "input1":{
       "name":"x",
-      "type":"DT_BOOL,DT_COMPLEX128,DT_COMPLEX64,DT_DOUBLE,DT_FLOAT,DT_FLOAT16,DT_INT16,DT_INT32,DT_INT64,DT_INT8,DT_UINT16,DT_UINT32,DT_UINT64,DT_UINT8"
+      "type":"DT_BOOL,DT_COMPLEX128,DT_COMPLEX64,DT_DOUBLE,DT_FLOAT,DT_FLOAT16,DT_INT16,DT_INT32,DT_INT64,DT_INT8,DT_UINT16,DT_UINT32,DT_UINT64,DT_UINT8,DT_BF16"
     },
     "input2":{
       "name":"begin",
@@ -7674,7 +7674,7 @@
     },
     "output0":{
       "name":"dx",
-      "type":"DT_BOOL,DT_COMPLEX128,DT_COMPLEX64,DT_DOUBLE,DT_FLOAT,DT_FLOAT16,DT_INT16,DT_INT32,DT_INT64,DT_INT8,DT_UINT16,DT_UINT32,DT_UINT64,DT_UINT8"
+      "type":"DT_BOOL,DT_COMPLEX128,DT_COMPLEX64,DT_DOUBLE,DT_FLOAT,DT_FLOAT16,DT_INT16,DT_INT32,DT_INT64,DT_INT8,DT_UINT16,DT_UINT32,DT_UINT64,DT_UINT8,DT_BF16"
     }
   },
   "CustSolveTriangular":{
@@ -12907,11 +12907,11 @@
   "SliceGrad":{
     "input0":{
       "name":"dy",
-      "type":"DT_BOOL,DT_COMPLEX128,DT_COMPLEX64,DT_DOUBLE,DT_FLOAT,DT_FLOAT16,DT_INT16,DT_INT32,DT_INT64,DT_INT8,DT_UINT16,DT_UINT32,DT_UINT64,DT_UINT8"
+      "type":"DT_BOOL,DT_COMPLEX128,DT_COMPLEX64,DT_DOUBLE,DT_FLOAT,DT_FLOAT16,DT_INT16,DT_INT32,DT_INT64,DT_INT8,DT_UINT16,DT_UINT32,DT_UINT64,DT_UINT8,DT_BF16"
     },
     "input1":{
       "name":"x",
-      "type":"DT_BOOL,DT_COMPLEX128,DT_COMPLEX64,DT_DOUBLE,DT_FLOAT,DT_FLOAT16,DT_INT16,DT_INT32,DT_INT64,DT_INT8,DT_UINT16,DT_UINT32,DT_UINT64,DT_UINT8"
+      "type":"DT_BOOL,DT_COMPLEX128,DT_COMPLEX64,DT_DOUBLE,DT_FLOAT,DT_FLOAT16,DT_INT16,DT_INT32,DT_INT64,DT_INT8,DT_UINT16,DT_UINT32,DT_UINT64,DT_UINT8,DT_BF16"
     },
     "input2":{
       "name":"begin",
@@ -12937,7 +12937,7 @@
     },
     "output0":{
       "name":"dx",
-      "type":"DT_BOOL,DT_COMPLEX128,DT_COMPLEX64,DT_DOUBLE,DT_FLOAT,DT_FLOAT16,DT_INT16,DT_INT32,DT_INT64,DT_INT8,DT_UINT16,DT_UINT32,DT_UINT64,DT_UINT8"
+      "type":"DT_BOOL,DT_COMPLEX128,DT_COMPLEX64,DT_DOUBLE,DT_FLOAT,DT_FLOAT16,DT_INT16,DT_INT32,DT_INT64,DT_INT8,DT_UINT16,DT_UINT32,DT_UINT64,DT_UINT8,DT_BF16"
     }
   },
   "SolveTriangular":{