mindspore 2.3.0__cp310-cp310-win_amd64.whl → 2.4.1__cp310-cp310-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of mindspore might be problematic.
Files changed (275)
  1. mindspore/.commit_id +1 -1
  2. mindspore/__init__.py +3 -1
  3. mindspore/_c_dataengine.cp310-win_amd64.pyd +0 -0
  4. mindspore/_c_expression.cp310-win_amd64.pyd +0 -0
  5. mindspore/_c_mindrecord.cp310-win_amd64.pyd +0 -0
  6. mindspore/_checkparam.py +50 -9
  7. mindspore/_extends/parse/compile_config.py +41 -0
  8. mindspore/_extends/parse/parser.py +9 -7
  9. mindspore/_extends/parse/standard_method.py +52 -14
  10. mindspore/_extends/pijit/pijit_func_white_list.py +350 -24
  11. mindspore/amp.py +24 -10
  12. mindspore/common/__init__.py +6 -4
  13. mindspore/common/_pijit_context.py +190 -0
  14. mindspore/common/_register_for_tensor.py +2 -1
  15. mindspore/common/_tensor_overload.py +139 -0
  16. mindspore/common/api.py +102 -87
  17. mindspore/common/dump.py +5 -6
  18. mindspore/common/generator.py +1 -7
  19. mindspore/common/hook_handle.py +14 -26
  20. mindspore/common/initializer.py +51 -15
  21. mindspore/common/mindir_util.py +2 -2
  22. mindspore/common/parameter.py +62 -15
  23. mindspore/common/recompute.py +39 -9
  24. mindspore/common/sparse_tensor.py +7 -3
  25. mindspore/common/tensor.py +183 -37
  26. mindspore/communication/__init__.py +1 -1
  27. mindspore/communication/_comm_helper.py +38 -3
  28. mindspore/communication/comm_func.py +315 -60
  29. mindspore/communication/management.py +14 -14
  30. mindspore/context.py +132 -22
  31. mindspore/dataset/__init__.py +1 -1
  32. mindspore/dataset/audio/__init__.py +1 -1
  33. mindspore/dataset/core/config.py +7 -0
  34. mindspore/dataset/core/validator_helpers.py +7 -0
  35. mindspore/dataset/engine/cache_client.py +1 -1
  36. mindspore/dataset/engine/datasets.py +72 -44
  37. mindspore/dataset/engine/datasets_audio.py +7 -7
  38. mindspore/dataset/engine/datasets_standard_format.py +53 -3
  39. mindspore/dataset/engine/datasets_text.py +20 -20
  40. mindspore/dataset/engine/datasets_user_defined.py +174 -104
  41. mindspore/dataset/engine/datasets_vision.py +33 -33
  42. mindspore/dataset/engine/iterators.py +29 -0
  43. mindspore/dataset/engine/obs/util.py +7 -0
  44. mindspore/dataset/engine/queue.py +114 -60
  45. mindspore/dataset/engine/serializer_deserializer.py +2 -2
  46. mindspore/dataset/engine/validators.py +34 -14
  47. mindspore/dataset/text/__init__.py +1 -4
  48. mindspore/dataset/transforms/__init__.py +0 -3
  49. mindspore/dataset/utils/line_reader.py +2 -0
  50. mindspore/dataset/vision/__init__.py +1 -4
  51. mindspore/dataset/vision/utils.py +1 -1
  52. mindspore/dataset/vision/validators.py +2 -1
  53. mindspore/{nn/extend → experimental/es}/__init__.py +4 -11
  54. mindspore/experimental/es/embedding_service.py +883 -0
  55. mindspore/{nn/layer → experimental/es}/embedding_service_layer.py +218 -30
  56. mindspore/experimental/llm_boost/__init__.py +21 -0
  57. mindspore/{nn/extend/layer → experimental/llm_boost/atb}/__init__.py +4 -8
  58. mindspore/experimental/llm_boost/atb/boost_base.py +211 -0
  59. mindspore/experimental/llm_boost/atb/llama_boost.py +115 -0
  60. mindspore/experimental/llm_boost/atb/qwen_boost.py +101 -0
  61. mindspore/experimental/llm_boost/register.py +129 -0
  62. mindspore/experimental/llm_boost/utils.py +31 -0
  63. mindspore/experimental/optim/adamw.py +85 -0
  64. mindspore/experimental/optim/optimizer.py +3 -0
  65. mindspore/hal/__init__.py +3 -3
  66. mindspore/hal/contiguous_tensors_handle.py +175 -0
  67. mindspore/hal/stream.py +18 -0
  68. mindspore/include/api/model_group.h +13 -1
  69. mindspore/include/api/types.h +10 -10
  70. mindspore/include/dataset/config.h +2 -2
  71. mindspore/include/dataset/constants.h +2 -2
  72. mindspore/include/dataset/execute.h +2 -2
  73. mindspore/include/dataset/vision.h +4 -0
  74. mindspore/log.py +1 -1
  75. mindspore/mindrecord/filewriter.py +68 -51
  76. mindspore/mindspore_backend.dll +0 -0
  77. mindspore/mindspore_common.dll +0 -0
  78. mindspore/mindspore_core.dll +0 -0
  79. mindspore/mindspore_np_dtype.dll +0 -0
  80. mindspore/mindspore_ops.dll +0 -0
  81. mindspore/mint/__init__.py +983 -46
  82. mindspore/mint/distributed/__init__.py +31 -0
  83. mindspore/mint/distributed/distributed.py +254 -0
  84. mindspore/mint/nn/__init__.py +268 -23
  85. mindspore/mint/nn/functional.py +125 -19
  86. mindspore/mint/nn/layer/__init__.py +39 -0
  87. mindspore/mint/nn/layer/activation.py +133 -0
  88. mindspore/mint/nn/layer/normalization.py +477 -0
  89. mindspore/mint/nn/layer/pooling.py +110 -0
  90. mindspore/mint/optim/adamw.py +26 -13
  91. mindspore/mint/special/__init__.py +63 -0
  92. mindspore/multiprocessing/__init__.py +2 -1
  93. mindspore/nn/__init__.py +0 -1
  94. mindspore/nn/cell.py +276 -96
  95. mindspore/nn/layer/activation.py +211 -44
  96. mindspore/nn/layer/basic.py +137 -10
  97. mindspore/nn/layer/embedding.py +137 -2
  98. mindspore/nn/layer/normalization.py +101 -5
  99. mindspore/nn/layer/padding.py +34 -48
  100. mindspore/nn/layer/pooling.py +161 -7
  101. mindspore/nn/layer/transformer.py +3 -3
  102. mindspore/nn/loss/__init__.py +2 -2
  103. mindspore/nn/loss/loss.py +84 -6
  104. mindspore/nn/optim/__init__.py +2 -1
  105. mindspore/nn/optim/adadelta.py +1 -1
  106. mindspore/nn/optim/adam.py +1 -1
  107. mindspore/nn/optim/lamb.py +1 -1
  108. mindspore/nn/optim/tft_wrapper.py +124 -0
  109. mindspore/nn/wrap/cell_wrapper.py +12 -23
  110. mindspore/nn/wrap/grad_reducer.py +5 -5
  111. mindspore/nn/wrap/loss_scale.py +17 -3
  112. mindspore/numpy/__init__.py +1 -1
  113. mindspore/numpy/array_creations.py +65 -68
  114. mindspore/numpy/array_ops.py +64 -60
  115. mindspore/numpy/fft.py +610 -75
  116. mindspore/numpy/logic_ops.py +11 -10
  117. mindspore/numpy/math_ops.py +85 -84
  118. mindspore/numpy/utils_const.py +4 -4
  119. mindspore/opencv_core452.dll +0 -0
  120. mindspore/opencv_imgcodecs452.dll +0 -0
  121. mindspore/opencv_imgproc452.dll +0 -0
  122. mindspore/ops/__init__.py +6 -4
  123. mindspore/ops/_grad_experimental/grad_array_ops.py +0 -11
  124. mindspore/ops/_grad_experimental/grad_comm_ops.py +67 -4
  125. mindspore/ops/_grad_experimental/grad_math_ops.py +0 -22
  126. mindspore/ops/_vmap/vmap_array_ops.py +2 -4
  127. mindspore/ops/_vmap/vmap_math_ops.py +17 -1
  128. mindspore/ops/_vmap/vmap_nn_ops.py +43 -2
  129. mindspore/ops/auto_generate/cpp_create_prim_instance_helper.py +91 -7
  130. mindspore/ops/auto_generate/gen_arg_dtype_cast.py +2 -0
  131. mindspore/ops/auto_generate/gen_extend_func.py +767 -13
  132. mindspore/ops/auto_generate/gen_ops_def.py +2452 -364
  133. mindspore/ops/auto_generate/gen_ops_prim.py +5442 -1756
  134. mindspore/ops/auto_generate/pyboost_inner_prim.py +176 -56
  135. mindspore/ops/composite/base.py +85 -48
  136. mindspore/ops/composite/multitype_ops/_compile_utils.py +1 -0
  137. mindspore/ops/composite/multitype_ops/not_in_impl.py +2 -2
  138. mindspore/ops/function/__init__.py +22 -0
  139. mindspore/ops/function/array_func.py +492 -153
  140. mindspore/ops/function/debug_func.py +113 -1
  141. mindspore/ops/function/fft_func.py +15 -2
  142. mindspore/ops/function/grad/grad_func.py +3 -2
  143. mindspore/ops/function/math_func.py +564 -207
  144. mindspore/ops/function/nn_func.py +817 -383
  145. mindspore/ops/function/other_func.py +3 -2
  146. mindspore/ops/function/random_func.py +402 -12
  147. mindspore/ops/function/reshard_func.py +13 -11
  148. mindspore/ops/function/sparse_unary_func.py +1 -1
  149. mindspore/ops/function/vmap_func.py +3 -2
  150. mindspore/ops/functional.py +24 -14
  151. mindspore/ops/op_info_register.py +3 -3
  152. mindspore/ops/operations/__init__.py +7 -2
  153. mindspore/ops/operations/_grad_ops.py +2 -76
  154. mindspore/ops/operations/_infer_ops.py +1 -1
  155. mindspore/ops/operations/_inner_ops.py +71 -94
  156. mindspore/ops/operations/array_ops.py +14 -146
  157. mindspore/ops/operations/comm_ops.py +63 -53
  158. mindspore/ops/operations/custom_ops.py +83 -19
  159. mindspore/ops/operations/debug_ops.py +42 -10
  160. mindspore/ops/operations/manually_defined/_inner.py +12 -0
  161. mindspore/ops/operations/manually_defined/ops_def.py +273 -20
  162. mindspore/ops/operations/math_ops.py +12 -223
  163. mindspore/ops/operations/nn_ops.py +20 -114
  164. mindspore/ops/operations/other_ops.py +7 -4
  165. mindspore/ops/operations/random_ops.py +46 -1
  166. mindspore/ops/primitive.py +18 -6
  167. mindspore/ops_generate/arg_dtype_cast.py +2 -0
  168. mindspore/ops_generate/gen_aclnn_implement.py +11 -11
  169. mindspore/ops_generate/gen_constants.py +36 -0
  170. mindspore/ops_generate/gen_ops.py +67 -52
  171. mindspore/ops_generate/gen_ops_inner_prim.py +1 -1
  172. mindspore/ops_generate/gen_pyboost_func.py +131 -47
  173. mindspore/ops_generate/op_proto.py +10 -3
  174. mindspore/ops_generate/pyboost_utils.py +14 -1
  175. mindspore/ops_generate/template.py +43 -21
  176. mindspore/parallel/__init__.py +3 -1
  177. mindspore/parallel/_auto_parallel_context.py +31 -9
  178. mindspore/parallel/_cell_wrapper.py +85 -0
  179. mindspore/parallel/_parallel_serialization.py +47 -19
  180. mindspore/parallel/_tensor.py +127 -13
  181. mindspore/parallel/_utils.py +53 -22
  182. mindspore/parallel/algo_parameter_config.py +5 -5
  183. mindspore/parallel/checkpoint_transform.py +46 -39
  184. mindspore/parallel/cluster/process_entity/__init__.py +1 -1
  185. mindspore/parallel/cluster/process_entity/_api.py +31 -23
  186. mindspore/parallel/cluster/process_entity/_utils.py +2 -27
  187. mindspore/parallel/parameter_broadcast.py +3 -4
  188. mindspore/parallel/shard.py +162 -31
  189. mindspore/parallel/transform_safetensors.py +1146 -0
  190. mindspore/profiler/__init__.py +2 -1
  191. mindspore/profiler/common/constant.py +29 -0
  192. mindspore/profiler/common/registry.py +47 -0
  193. mindspore/profiler/common/util.py +28 -0
  194. mindspore/profiler/dynamic_profiler.py +694 -0
  195. mindspore/profiler/envprofiling.py +17 -19
  196. mindspore/profiler/parser/ascend_analysis/constant.py +18 -0
  197. mindspore/profiler/parser/ascend_analysis/file_manager.py +25 -4
  198. mindspore/profiler/parser/ascend_analysis/function_event.py +43 -19
  199. mindspore/profiler/parser/ascend_analysis/fwk_cann_parser.py +31 -26
  200. mindspore/profiler/parser/ascend_analysis/fwk_file_parser.py +56 -10
  201. mindspore/profiler/parser/ascend_analysis/msprof_timeline_parser.py +55 -8
  202. mindspore/profiler/parser/ascend_analysis/path_manager.py +313 -0
  203. mindspore/profiler/parser/ascend_analysis/profiler_info_parser.py +27 -20
  204. mindspore/profiler/parser/ascend_analysis/trace_event_manager.py +9 -2
  205. mindspore/profiler/parser/ascend_msprof_exporter.py +5 -4
  206. mindspore/profiler/parser/ascend_timeline_generator.py +27 -25
  207. mindspore/profiler/parser/base_timeline_generator.py +19 -25
  208. mindspore/profiler/parser/cpu_gpu_timeline_generator.py +25 -12
  209. mindspore/profiler/parser/framework_parser.py +1 -391
  210. mindspore/profiler/parser/gpu_analysis/__init__.py +14 -0
  211. mindspore/profiler/parser/gpu_analysis/function_event.py +44 -0
  212. mindspore/profiler/parser/gpu_analysis/fwk_file_parser.py +89 -0
  213. mindspore/profiler/parser/gpu_analysis/profiler_info_parser.py +72 -0
  214. mindspore/profiler/parser/memory_usage_parser.py +0 -154
  215. mindspore/profiler/parser/profiler_info.py +78 -6
  216. mindspore/profiler/profiler.py +153 -0
  217. mindspore/profiler/profiling.py +285 -413
  218. mindspore/rewrite/__init__.py +1 -2
  219. mindspore/rewrite/common/namespace.py +4 -4
  220. mindspore/rewrite/symbol_tree/symbol_tree.py +3 -3
  221. mindspore/run_check/_check_version.py +39 -104
  222. mindspore/safeguard/rewrite_obfuscation.py +591 -247
  223. mindspore/train/__init__.py +4 -3
  224. mindspore/train/_utils.py +105 -19
  225. mindspore/train/amp.py +171 -53
  226. mindspore/train/callback/__init__.py +2 -2
  227. mindspore/train/callback/_callback.py +4 -4
  228. mindspore/train/callback/_checkpoint.py +97 -31
  229. mindspore/train/callback/_cluster_monitor.py +1 -1
  230. mindspore/train/callback/_flops_collector.py +1 -0
  231. mindspore/train/callback/_loss_monitor.py +3 -3
  232. mindspore/train/callback/_on_request_exit.py +145 -31
  233. mindspore/train/callback/_summary_collector.py +5 -5
  234. mindspore/train/callback/_tft_register.py +375 -0
  235. mindspore/train/dataset_helper.py +15 -3
  236. mindspore/train/metrics/metric.py +3 -3
  237. mindspore/train/metrics/roc.py +4 -4
  238. mindspore/train/mind_ir_pb2.py +44 -39
  239. mindspore/train/model.py +154 -58
  240. mindspore/train/serialization.py +342 -128
  241. mindspore/utils/__init__.py +21 -0
  242. mindspore/utils/utils.py +60 -0
  243. mindspore/version.py +1 -1
  244. {mindspore-2.3.0.dist-info → mindspore-2.4.1.dist-info}/METADATA +13 -7
  245. {mindspore-2.3.0.dist-info → mindspore-2.4.1.dist-info}/RECORD +248 -242
  246. mindspore/include/c_api/ms/abstract.h +0 -67
  247. mindspore/include/c_api/ms/attribute.h +0 -197
  248. mindspore/include/c_api/ms/base/handle_types.h +0 -43
  249. mindspore/include/c_api/ms/base/macros.h +0 -32
  250. mindspore/include/c_api/ms/base/status.h +0 -33
  251. mindspore/include/c_api/ms/base/types.h +0 -283
  252. mindspore/include/c_api/ms/context.h +0 -102
  253. mindspore/include/c_api/ms/graph.h +0 -160
  254. mindspore/include/c_api/ms/node.h +0 -606
  255. mindspore/include/c_api/ms/tensor.h +0 -161
  256. mindspore/include/c_api/ms/value.h +0 -84
  257. mindspore/mindspore_shared_lib.dll +0 -0
  258. mindspore/nn/extend/basic.py +0 -140
  259. mindspore/nn/extend/embedding.py +0 -143
  260. mindspore/nn/extend/layer/normalization.py +0 -109
  261. mindspore/nn/extend/pooling.py +0 -117
  262. mindspore/nn/layer/embedding_service.py +0 -531
  263. mindspore/ops/_op_impl/aicpu/strided_slice_v2.py +0 -93
  264. mindspore/ops/_op_impl/aicpu/strided_slice_v2_grad.py +0 -66
  265. mindspore/ops/extend/__init__.py +0 -53
  266. mindspore/ops/extend/array_func.py +0 -218
  267. mindspore/ops/extend/math_func.py +0 -76
  268. mindspore/ops/extend/nn_func.py +0 -308
  269. mindspore/ops/silent_check.py +0 -162
  270. mindspore/profiler/parser/msadvisor_analyzer.py +0 -82
  271. mindspore/profiler/parser/msadvisor_parser.py +0 -240
  272. mindspore/train/callback/_mindio_ttp.py +0 -443
  273. {mindspore-2.3.0.dist-info → mindspore-2.4.1.dist-info}/WHEEL +0 -0
  274. {mindspore-2.3.0.dist-info → mindspore-2.4.1.dist-info}/entry_points.txt +0 -0
  275. {mindspore-2.3.0.dist-info → mindspore-2.4.1.dist-info}/top_level.txt +0 -0
@@ -14,7 +14,7 @@
 # ============================================================================
 """Defines other operators with functional form."""
 from mindspore.ops import operations as P
-
+from mindspore.ops.auto_generate import rotary_position_embedding
 
 partial_ = P.Partial()
 depend_ = P.Depend()
@@ -109,6 +109,7 @@ def depend(value, expr):
 
 __all__ = [
     'depend',
-    'partial'
+    'partial',
+    'rotary_position_embedding'
 ]
 __all__.sort()
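The hunks above re-export the auto-generated rotary_position_embedding under mindspore.ops. A minimal calling sketch, assuming the signature documented for MindSpore 2.4 (rotary_position_embedding(x, cos, sin, mode=0), Ascend backend); the rotate-half shapes below follow the pattern in the docs and are an assumption:

    import numpy as np
    import mindspore as ms
    from mindspore import Tensor, ops

    # Assumed rotate-half layout (mode=0): x is (B, S, N, D) with head dim
    # D = 128; cos/sin broadcast over batch and heads.
    b, s, n, d = 1, 16, 2, 128
    x = Tensor(np.random.uniform(-1, 1, (b, s, n, d)), ms.float16)
    cos = Tensor(np.random.uniform(-1, 1, (1, s, 1, d)), ms.float16)
    sin = Tensor(np.random.uniform(-1, 1, (1, s, 1, d)), ms.float16)

    out = ops.rotary_position_embedding(x, cos, sin, 0)
    print(out.shape)  # (1, 16, 2, 128)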
@@ -30,7 +30,8 @@ from mindspore.common.api import _function_forbid_reuse
 from mindspore.ops.auto_generate import randperm
 from mindspore.common.generator import default_generator
 from mindspore.ops.auto_generate import UniformExt, NormalTensorTensor, \
-    NormalTensorFloat, NormalFloatTensor, NormalFloatFloat, RandExt, RandLikeExt
+    NormalTensorFloat, NormalFloatTensor, NormalFloatFloat, RandExt, RandLikeExt, MultinomialExt, \
+    Randn, RandnLike, RandInt, RandIntLike, RandpermExt
 
 normal_tensor_tensor_op = NormalTensorTensor()
 normal_tensor_float_op = NormalTensorFloat()
@@ -42,9 +43,15 @@ real_div_ = P.RealDiv()
 reshape_ = P.Reshape()
 shape_ = P.Shape()
 top_k_ = P.TopK()
+randperm_ext_ = RandpermExt()
 uniform_ = UniformExt()
 rand_ext_ = RandExt()
 rand_like_ext_ = RandLikeExt()
+multinomial_ext_ = MultinomialExt()
+randn_ = Randn()
+randn_like_ = RandnLike()
+randint_ = RandInt()
+randint_like_ = RandIntLike()
 generator_step_ = Tensor(10, mstype.int64)
 
 
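The instantiation block above follows the convention used throughout random_func.py: each auto-generated primitive is constructed once at import time, and the functional wrappers below draw a (seed, offset) pair from a generator on every call. A plain-Python model of that pattern (all names here are illustrative stand-ins, not MindSpore APIs):

    class _FakePrim:
        """Stands in for an auto-generated primitive such as RandExt()."""
        def __call__(self, size, seed, offset):
            return f"sample(shape={size}, seed={seed}, offset={offset})"

    class _FakeGenerator:
        """Stands in for mindspore.Generator's seed/offset bookkeeping."""
        def __init__(self, seed=0):
            self._seed, self._offset = seed, 0
        def _step(self, step):
            seed, offset = self._seed, self._offset
            self._offset += step  # reserve `step` RNG states for this call
            return seed, offset

    _default_generator = _FakeGenerator()
    _rand_prim = _FakePrim()  # module-level instance, created once at import

    def rand_pattern(size, generator=None):
        gen = generator or _default_generator
        seed, offset = gen._step(10)  # mirrors generator_step_ = Tensor(10, int64)
        return _rand_prim(size, seed, offset)

    print(rand_pattern((2, 3)))  # same seed, offset 0
    print(rand_pattern((2, 3)))  # same seed, offset advanced to 10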
@@ -116,14 +123,18 @@ def standard_laplace(shape, seed=None):
     .. math::
         \text{f}(x) = \frac{1}{2}\exp(-|x|)
 
+    .. warning::
+        The Ascend backend does not support the reproducibility of random numbers, so
+        the `seed` parameter has no effect.
+
     Args:
         shape (Union[tuple, Tensor]): The shape of random tensor to be generated. Only constant value is allowed
             when the input type is tuple. And the operator supports dynamic shape only when the input type is Tensor.
         seed (int, optional): Seed is used as entropy source for Random number engines generating pseudo-random numbers.
-            Default: ``None`` , which will be treated as 0.
+            Default: ``None`` .
 
     Returns:
-        Tensor. The shape that the input 'shape' denotes. The dtype is float32.
+        Tensor. The shape that the input `shape` denotes. The dtype is float32.
 
     Raises:
         TypeError: If shape is neither a tuple nor a Tensor.
@@ -153,6 +164,10 @@ def random_categorical(logits, num_sample, seed=0, dtype=mstype.int64):
     r"""
     Generates random samples from a given categorical distribution tensor.
 
+    .. warning::
+        The Ascend backend does not support the reproducibility of random numbers, so
+        the `seed` parameter has no effect.
+
     Args:
         logits (Tensor): The input tensor. 2-D Tensor with shape :math:`(batch\_size, num\_classes)`.
         num_sample (int): Number of sample to be drawn. Only constant values is allowed.
@@ -278,7 +293,8 @@ def uniform_ext(tensor, a, b, generator=None):
     """
     if generator is None:
         generator = default_generator
-    seed, offset = generator._step(generator_step_) # pylint: disable=protected-access
+    seed, offset = generator._step( # pylint: disable=protected-access
+        generator_step_)
     return uniform_(tensor, a, b, seed, offset)
 
 
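The wrapper change above is purely cosmetic (line wrapping for pylint), but it shows where the generator state enters: every call takes a (seed, offset) pair from a mindspore.Generator. A sketch of supplying an explicit generator instead of the module default (mindspore.Generator and manual_seed are the public RNG-state API; treating uniform_ext as sampling U[a, b) values shaped like its tensor argument is an assumption based on the wrapper above):

    import mindspore as ms
    from mindspore import Tensor, ops

    gen = ms.Generator()
    gen.manual_seed(42)  # pin the generator state explicitly

    x = Tensor([[0.0, 0.0], [0.0, 0.0]], ms.float32)
    # Each call consumes (seed, offset) from `gen` rather than default_generator.
    out = ops.function.random_func.uniform_ext(x, 0.0, 1.0, generator=gen)
    print(out.shape)  # (2, 2)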
@@ -374,6 +390,10 @@ def standard_normal(shape, seed=None):
     .. math::
         f(x)=\frac{1}{\sqrt{2 \pi}} e^{\left(-\frac{x^{2}}{2}\right)}
 
+    .. warning::
+        The Ascend backend does not support the reproducibility of random numbers, so
+        the `seed` parameter has no effect.
+
     Args:
         shape (Union[tuple, Tensor]): The shape of random tensor to be generated. Only constant value is allowed
             when the input type is tuple. And the operator supports dynamic shape only when the input type is Tensor.
@@ -419,8 +439,14 @@ def uniform_candidate_sampler(true_classes,
     This function samples a set of classes(sampled_candidates) from [0, range_max-1] based on uniform distribution.
     If unique=True, candidates are drawn without replacement, else unique=False with replacement.
 
+    .. warning::
+        - The Ascend backend does not support the reproducibility of random numbers, so
+          the `seed` parameter has no effect.
+        - The Ascend backend does not support dynamic shape scenarios currently.
+
     Args:
         true_classes (Tensor): A Tensor. The target classes with a Tensor shape of :math:`(batch\_size, num\_true)` .
+            The value range of the elements must be :math:`[0, range\_max)`.
         num_true (int): The number of target classes in each training example.
         num_sampled (int): The number of classes to randomly sample. The sampled_candidates will have a shape
             of num_sampled. If unique=True, num_sampled must be less than or equal to range_max.
@@ -483,11 +509,15 @@ def random_poisson(shape, rate, seed=None, dtype=mstype.float32):
 
         \text{P}(i|μ) = \frac{\exp(-μ)μ^{i}}{i!}
 
+    .. warning::
+        The Ascend backend does not support the reproducibility of random numbers, so
+        the `seed` parameter has no effect.
+
     Args:
         shape (Tensor): The shape of random tensor to be sampled from each poisson distribution, 1-D `Tensor` whose
             dtype is mstype.int32 or mstype.int64.
         rate (Tensor): The :math:`μ` parameter the distribution is constructed with.
-            It represents the mean of the distribution
+            It represents the mean of poisson distribution
            and also the variance of the distribution. It should be a `Tensor` whose dtype is mstype.int64,
            mstype.int32, mstype.float64, mstype.float32 or mstype.float16.
         seed (int, optional): Seed is used as entropy source for the random number engines to generate pseudo-random
@@ -582,6 +612,10 @@ def log_uniform_candidate_sampler(true_classes, num_true=1, num_sampled=5, uniqu
 
     Randomly samples a tensor of sampled classes from the range of integers [0, range_max).
 
+    .. warning::
+        The Ascend backend does not support the reproducibility of random numbers, so
+        the `seed` parameter has no effect.
+
     Args:
         true_classes (Tensor): The target classes. With data type of int64 and
             shape :math:`(batch\_size, num\_true)` .
@@ -640,12 +674,16 @@ def choice_with_mask(input_x, count=256, seed=None):
     The returned index tensor denotes the index of the nonzero
     sample, the mask tensor denotes which elements in the index tensor are valid.
 
+    .. warning::
+        The Ascend backend does not support the reproducibility of random numbers, so
+        the `seed` parameter has no effect.
+
     Args:
         input_x (Tensor[bool]): The input tensor.
             The input tensor rank must be greater than or equal to 1 and less than or equal to 5.
         count (int, optional): Number of items expected to get and the number must be greater than 0. Default: ``256`` .
         seed (int, optional): Seed is used as entropy source for Random number engines generating pseudo-random numbers.
-            Default: ``None`` , which will be treated as 0.
+            Default: ``None`` .
 
     Returns:
         Two tensors, the first one is the index tensor and the other one is the mask tensor.
@@ -724,7 +762,8 @@ def normal_ext(mean=0.0, std=1.0, size=None, generator=None):
     """
     if generator is None:
         generator = default_generator
-    seed, offset = generator._step(generator_step_) # pylint: disable=protected-access
+    seed, offset = generator._step( # pylint: disable=protected-access
+        generator_step_)
 
     is_mean_tensor = isinstance(mean, Tensor)
     is_std_tensor = isinstance(std, Tensor)
@@ -743,6 +782,10 @@ def normal(shape, mean, stddev, seed=None):
     """
     Generates random numbers according to the Normal (or Gaussian) random number distribution.
 
+    .. warning::
+        The Ascend backend does not support the reproducibility of random numbers, so
+        the `seed` parameter has no effect.
+
     Args:
         shape (tuple): The shape of random tensor to be generated.
             The format is :math:`(N,*)` where :math:`*` means, any number of additional dimensions.
@@ -809,6 +852,10 @@ def laplace(shape, mean, lambda_param, seed=None):
     .. math::
         \text{f}(x;μ,λ) = \frac{1}{2λ}\exp(-\frac{|x-μ|}{λ}),
 
+    .. warning::
+        The Ascend backend does not support the reproducibility of random numbers, so
+        the `seed` parameter has no effect.
+
     Args:
         shape (tuple): The shape of random tensor to be generated.
             The format is :math:`(N,*)` where :math:`*` means, any number of additional dimensions.
@@ -856,12 +903,16 @@ def gamma(shape, alpha, beta, seed=None):
     r"""
     Generates random numbers according to the Gamma random number distribution.
 
+    .. warning::
+        The Ascend backend does not support the reproducibility of random numbers, so
+        the `seed` parameter has no effect.
+
     Args:
         shape (tuple): The shape of random tensor to be generated.
         alpha (Tensor): The :math:`\alpha` distribution parameter. It should be greater than 0 with float32 data type.
         beta (Tensor): The :math:`\beta` distribution parameter. It should be greater than 0 with float32 data type.
         seed (int, optional): Seed is used as entropy source for the random number engines to generate
-            pseudo-random numbers, must be non-negative. Default: ``None`` , which will be treated as ``0`` .
+            pseudo-random numbers, must be non-negative. Default: ``None`` .
 
     Returns:
         Tensor. The shape should be equal to the broadcasted shape between the input `shape` and shapes
@@ -959,6 +1010,10 @@ def rand(*size, dtype=None, seed=None):
     Returns a new tensor that fills numbers from the uniform distribution over an interval :math:`[0, 1)`
     based on the given shape and dtype.
 
+    .. warning::
+        The Ascend backend does not support the reproducibility of random numbers, so
+        the `seed` parameter has no effect.
+
     Args:
         size (Union[int, tuple(int), list(int)]): Shape of the new tensor, e.g. :math:`(2, 3)` or :math:`2`.
 
@@ -1003,6 +1058,10 @@ def rand_like(input, seed=None, *, dtype=None):
     Returns a new tensor that fills numbers from the uniform distribution over an interval :math:`[0, 1)`
     based on the given shape and dtype.
 
+    .. warning::
+        The Ascend backend does not support the reproducibility of random numbers, so
+        the `seed` parameter has no effect.
+
     Args:
         input (Tensor): Input Tensor to specify the output shape and its default dtype.
         seed (int, optional): Random seed, must be greater or equal to 0. Default: ``None`` , and ``0`` will be used.
@@ -1072,13 +1131,14 @@ def rand_ext(*size, generator=None, dtype=None):
         ``Ascend``
 
     Examples:
-        >>> import mindspore.ops as ops
+        >>> from mindspore import ops
         >>> print(ops.function.random_func.rand_ext(2, 3).shape)
         (2, 3)
     """
     if not generator:
         generator = default_generator
-    seed, offset = generator._step(generator_step_) # pylint: disable=protected-access
+    seed, offset = generator._step( # pylint: disable=protected-access
+        generator_step_)
     return rand_ext_(size, seed, offset, dtype)
 
 
@@ -1112,16 +1172,184 @@ def rand_like_ext(input, *, dtype=None):
         >>> print(ops.function.random_func.rand_like_ext(a, dtype=ms.float32).shape)
         (2, 3)
     """
-    seed, offset = default_generator._step(generator_step_) # pylint: disable=protected-access
+    seed, offset = default_generator._step( # pylint: disable=protected-access
+        generator_step_)
     return rand_like_ext_(input, seed, offset, dtype)
 
 
+@_function_forbid_reuse
+def randn_ext(*size, generator=None, dtype=None):
+    r"""
+    Returns a new tensor filled with numbers from the normal distribution over an interval :math:`[0, 1)`
+    based on the given shape and dtype.
+
+    .. warning::
+        This is an experimental API that is subject to change or deletion.
+
+    Args:
+        size (Union[int, tuple(int), list(int)]): Shape of the new tensor, e.g. :math:`(2, 3)` or :math:`2`.
+
+    Keyword Args:
+        generator (:class:`mindspore.Generator`, optional): a pseudorandom number generator.
+            Default: ``None``, uses the default pseudorandom number generator.
+        dtype (:class:`mindspore.dtype`, optional): Designated tensor dtype, it must be float type. If None,
+            `mindspore.float32` will be applied. Default: ``None`` .
+
+    Returns:
+        Tensor, with the designated shape and dtype, filled with random numbers from the normal distribution on
+        the interval :math:`[0, 1)`.
+
+    Raises:
+        ValueError: If `dtype` is not a `mstype.float_type` type.
+
+    Supported Platforms:
+        ``Ascend``
+
+    Examples:
+        >>> from mindspore import ops
+        >>> print(ops.function.random_func.randn_ext(2, 3).shape)
+        (2, 3)
+    """
+    if not generator:
+        generator = default_generator
+    seed, offset = generator._step( # pylint: disable=protected-access
+        generator_step_)
+    return randn_(size, seed, offset, dtype)
+
+
+@_function_forbid_reuse
+def randn_like_ext(input, *, dtype=None):
+    r"""
+    Returns a new tensor filled with numbers from the normal distribution over an interval :math:`[0, 1)`
+    based on the given dtype and shape of the input tensor.
+
+    .. warning::
+        This is an experimental API that is subject to change or deletion.
+
+    Args:
+        input (Tensor): Input Tensor to specify the output shape and its default dtype.
+
+    Keyword Args:
+        dtype (:class:`mindspore.dtype`, optional): Designated tensor dtype, it must be float type. If None,
+            the same dtype of `input` will be applied. Default: ``None`` .
+
+    Returns:
+        Tensor, with the designated shape and dtype, filled with random numbers from the normal distribution on
+        the interval :math:`[0, 1)`.
+
+    Raises:
+        ValueError: If `dtype` is not a `mstype.float_type` type.
+
+    Supported Platforms:
+        ``Ascend``
+
+    Examples:
+        >>> import mindspore as ms
+        >>> from mindspore import Tensor, ops
+        >>> a = Tensor([[2, 3, 4], [1, 2, 3]])
+        >>> print(ops.function.random_func.randn_like_ext(a, dtype=ms.float32).shape)
+        (2, 3)
+    """
+    seed, offset = default_generator._step( # pylint: disable=protected-access
+        generator_step_)
+    return randn_like_(input, seed, offset, dtype)
+
+
+@_function_forbid_reuse
+def randint_ext(low, high, size, *, generator=None, dtype=None):
+    r"""
+    Returns a new tensor filled with integer numbers from the uniform distribution over an interval :math:`[low, high)`
+    based on the given shape and dtype.
+
+    .. warning::
+        This is an experimental API that is subject to change or deletion.
+
+    Args:
+        low (int): the lower bound of the generated random number
+        high (int): the upper bound of the generated random number
+        size (Union[tuple(int), list(int)]): Shape of the new tensor, e.g. :math:`(2, 3)`.
+
+    Keyword Args:
+        generator (:class:`mindspore.Generator`, optional): a pseudorandom number generator.
+            Default: ``None``, uses the default pseudorandom number generator.
+        dtype (:class:`mindspore.dtype`, optional): Designated tensor dtype. If None,
+            `mindspore.int64` will be applied. Default: ``None`` .
+
+    Returns:
+        Tensor, with the designated shape and dtype, filled with random numbers from the uniform distribution on
+        the interval :math:`[low, high)`.
+
+    Raises:
+        TypeError: If `size` is not a tuple.
+        TypeError: If `low` or `high` is not integer.
+
+    Supported Platforms:
+        ``Ascend``
+
+    Examples:
+        >>> from mindspore import ops
+        >>> print(ops.function.random_func.randint_ext(0, 5, (2, 3)).shape)
+        (2, 3)
+    """
+    if not generator:
+        generator = default_generator
+    seed, offset = generator._step( # pylint: disable=protected-access
+        generator_step_)
+    return randint_(low, high, size, seed, offset, dtype)
+
+
+@_function_forbid_reuse
+def randint_like_ext(input, low, high, *, dtype=None):
+    r"""
+    Returns a new tensor filled with integer numbers from the uniform distribution over an interval :math:`[low, high)`
+    based on the given dtype and shape of the input tensor.
+
+    .. warning::
+        This is an experimental API that is subject to change or deletion.
+
+    Args:
+        input (Tensor): Input Tensor to specify the output shape and its default dtype.
+        low (int): the lower bound of the generated random number
+        high (int): the upper bound of the generated random number
+
+    Keyword Args:
+        dtype (:class:`mindspore.dtype`, optional): Designated tensor dtype. If None,
+            the same dtype of `input` will be applied. Default: ``None`` .
+
+    Returns:
+        Tensor, with the designated shape and dtype, filled with random numbers from the uniform distribution on
+        the interval :math:`[low, high)`.
+
+    Raises:
+        TypeError: If `low` or `high` is not integer.
+
+    Supported Platforms:
+        ``Ascend``
+
+    Examples:
+        >>> import mindspore as ms
+        >>> from mindspore import Tensor, ops
+        >>> a = Tensor([[2, 3, 4], [1, 2, 3]])
+        >>> low = 0
+        >>> high = 5
+        >>> print(ops.function.random_func.randint_like_ext(a, low, high, dtype=ms.int32).shape)
+        (2, 3)
+    """
+    seed, offset = default_generator._step( # pylint: disable=protected-access
+        generator_step_)
+    return randint_like_(input, low, high, seed, offset, dtype)
+
+
 @_function_forbid_reuse
 def randn(*size, dtype=None, seed=None):
     r"""
     Returns a new Tensor with given shape and dtype, filled with a sample (or samples)
     from the standard normal distribution.
 
+    .. warning::
+        The Ascend backend does not support the reproducibility of random numbers, so
+        the `seed` parameter has no effect.
+
     Args:
         size (Union[int, tuple(int), list(int)]): Shape of the new tensor, e.g., :math:`(2, 3)` or :math:`2`.
 
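All four functions added above share one structure: resolve the generator, advance it once via _step, and hand (seed, offset) to the corresponding primitive. A short usage sketch based on the docstring examples (Ascend backend per the Supported Platforms sections; mindspore.Generator is the public RNG-state API):

    import mindspore as ms
    from mindspore import ops

    gen = ms.Generator()
    gen.manual_seed(0)

    # New-style helpers: shape comes from the call, randomness from the generator.
    noise = ops.function.random_func.randn_ext(2, 3, generator=gen)
    ints = ops.function.random_func.randint_ext(0, 5, (2, 3), generator=gen)
    print(noise.shape, ints.shape)  # (2, 3) (2, 3)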
@@ -1167,6 +1395,10 @@ def randn_like(input, seed=None, *, dtype=None):
     Returns a new Tensor with given shape and dtype, filled with a sample (or samples) from the standard normal
     distribution.
 
+    .. warning::
+        The Ascend backend does not support the reproducibility of random numbers, so
+        the `seed` parameter has no effect.
+
     Args:
         input (Tensor): Input Tensor to specify the output shape and its default dtype.
         seed (int, optional): Random seed, must be greater or equal to 0. Default: ``None`` , and 0 will be used.
@@ -1215,6 +1447,10 @@ def randint(low, high, size, seed=None, *, dtype=None):
     r"""
     Returns a Tensor whose elements are random integers in the range of [ `low` , `high` ) .
 
+    .. warning::
+        The Ascend backend does not support the reproducibility of random numbers, so
+        the `seed` parameter has no effect.
+
     Args:
         low (int): Start value of interval.
         high (int): End value of interval.
@@ -1274,6 +1510,10 @@ def randint_like(input, low, high, seed=None, *, dtype=None):
     Returns a tensor with the same shape as Tensor `input` whose elements are random integers in the range
     of [ `low` , `high` ) .
 
+    .. warning::
+        The Ascend backend does not support the reproducibility of random numbers, so
+        the `seed` parameter has no effect.
+
     Args:
         input (Tensor): Input Tensor to specify the output shape and its default dtype.
         low(int): Start value of interval.
@@ -1328,6 +1568,47 @@ def randint_like(input, low, high, seed=None, *, dtype=None):
     return cast_(output, dtype)
 
 
+def randperm_ext(n, *, generator=None, dtype=mstype.int64):
+    r"""
+    Generates random permutation of integers from 0 to n-1.
+
+    .. warning::
+        - This is an experimental API that is subject to change or deletion.
+
+
+    Args:
+        n (Union[Tensor, int]): size of the permutation. int or Tensor with shape: () or (1,) and
+            data type int64. The value of `n` must be greater than zero.
+        generator (:class:`mindspore.Generator`, optional): a pseudorandom number generator.
+            Default: ``None``, uses the default pseudorandom number generator.
+        dtype (mindspore.dtype, optional): The type of output. Default: mstype.int64.
+
+    Returns:
+        Tensor with shape (n,) and type `dtype`.
+
+    Raises:
+        TypeError: If `dtype` is not supported.
+        ValueError: If `n` is a negative or 0 element.
+        ValueError: If `n` is larger than the maximal data of the set dtype.
+
+    Supported Platforms:
+        ``Ascend``
+
+    Examples:
+        >>> from mindspore import ops
+        >>> from mindspore import dtype as mstype
+        >>> n = 4
+        >>> output = ops.randperm_ext(n, dtype=mstype.int64)
+        >>> print(output.shape)
+        (4,)
+    """
+    if not generator:
+        generator = default_generator
+    seed, offset = generator._step( # pylint: disable=protected-access
+        generator_step_)
+    return randperm_ext_(n, seed, offset, dtype)
+
+
 @_function_forbid_reuse
 def poisson(shape, mean, seed=None):
     r"""
@@ -1418,6 +1699,10 @@ def multinomial(input, num_samples, replacement=True, seed=None):
     but must be non-negative, finite and have a non-zero sum. When using values as weights, it can be understood as
     normalizing the input along the last dimension.
 
+    .. warning::
+        The Ascend backend does not support the reproducibility of random numbers, so
+        the `seed` parameter has no effect.
+
     Args:
         input (Tensor): The input tensor containing probabilities, must be 1 or 2 dimensions, with
             float32 data type.
@@ -1428,7 +1713,7 @@ def multinomial(input, num_samples, replacement=True, seed=None):
 
     Returns:
         Tensor, has the same rows with input. The number of sampled indices of each row is `num_samples`.
-        The dtype is float32.
+        The dtype is int32.
 
     Raises:
         TypeError: If `input` is not a Tensor whose dtype is not float32.
@@ -1506,6 +1791,111 @@ def multinomial(input, num_samples, replacement=True, seed=None):
     return random_nomial(input, num_samples)
 
 
+@_function_forbid_reuse
+def multinomial_ext(input, num_samples, replacement=False, *, generator=None):
+    r"""
+    Returns a tensor sampled from the multinomial probability distribution located in the corresponding
+    row of the input tensor.
+
+    The polynomial distribution is a probability distribution that generalizes the binomial distribution formula to
+    multiple states. In the polynomial distribution, each event has a fixed probability, and the sum of these
+    probabilities is 1. The purpose of the `mindspore.mint.multinomial` interface is to perform `num_samples` sampling
+    on the input `input`, and the output tensor is the index of the input tensor for each sampling.
+    The values in `input` represent the probability of selecting the corresponding index for each sampling.
+
+    Here is an extreme example for better understanding. Suppose we have an input probability tensor with
+    values `Tensor([90 / 100, 10 / 100, 0], mindspore.float32)`, which means we can sample three indices,
+    namely index 0, index 1, and index 2, with probabilities of 90%, 10%, and 0%, respectively. We perform n samplings,
+    and the resulting sequence is the calculation result of the polynomial distribution, with a length equal to the
+    number of samplings.
+
+    In case 1 of the sample code, we perform two non-replacement samplings (`replacement` is `False`).
+    The calculation result is most likely `[0, 1]`, and less likely `[1, 0]`. Since the probability of selecting
+    index 0 is 90% for each sampling, the first result is most likely to be index 0. Since the probability of selecting
+    index 2 is 0, index 2 cannot appear in the sampling result. Therefore, the second result must be index 1,
+    and the resulting sequence is `[0, 1]`.
+
+    In case 2 of the sample code, we perform 10 replacement samplings (`replacement` is `True`).
+    As expected, about 90% of the sampling results are index 0.
+
+    In case 3 of the sample code, we extend the input to 2 dimensions, and the sampling results
+    in each dimension also match our sampling expectations.
+
+    Note:
+        The rows of input do not need to sum to one (in which case we use the values as weights),
+        but must be non-negative, finite and have a non-zero sum.
+        When using values as weights, it can be understood as normalizing the input along the last dimension.
+
+    .. warning::
+        This is an experimental API that is subject to change or deletion.
+
+    Args:
+        input (Tensor): The input tensor containing probabilities, must be 1 or 2 dimensions, with float32 data type.
+        num_samples (int): Number of samples to draw.
+        replacement (bool, optional): Whether to draw with replacement or not. Default: ``False`` .
+
+    Keyword Args:
+        generator (generator, optional): MindSpore generator. Default: ``None``.
+
+    Returns:
+        Tensor, dtype is Int64.
+        If `input` is a vector, out is a vector of size `num_samples`.
+        If `input` is a matrix with m rows, out is an matrix of shape(m * num_samples).
+
+    Raises:
+        TypeError: If `input` is not a Tensor whose dtype is not in float16, float32, float64 or bfloat16.
+        TypeError: If `num_samples` is not an int, a Scalar of int, a Tensor with shape [1,] and
+            only one int element, or a Tensor with shape (1, 1).
+        RuntimeError: If :math:`\text{num_samples} <= 0`.
+        RuntimeError: If `replacement` is False, :math:`\text{num_samples} > shape` of the last dimension of `input`.
+        RuntimeError: If shape of the last dimension of `input` exceeds ``2^24``.
+
+    Supported Platforms:
+        ``Ascend``
+
+    Examples:
+        >>> import mindspore
+        >>> from mindspore import Tensor, ops
+        >>> from mindspore import dtype as mstype
+        >>> # case 1: The output is random, and the length of the output is the same as num_sample.
+        >>> # replacement is False.
+        >>> input1 = Tensor([90 / 100, 10 / 100, 0], mindspore.float32)
+        >>> input2 = Tensor([90, 10, 0], mindspore.float32)
+        >>> # input1 and input2 have the same meaning.
+        >>> output1 = ops.multinomial_ext(input1, 2)
+        >>> output2 = ops.multinomial_ext(input2, 2)
+        >>> # print(output1)
+        >>> # [0 1]
+        >>> # print(output2)
+        >>> # [0 1]
+        >>> print(len(output1))
+        2
+        >>> print(len(output2))
+        2
+        >>> # case 2: The output is random, and the length of the output is the same as num_sample.
+        >>> # replacement is True.
+        >>> output3 = ops.multinomial_ext(input1, 10, replacement=True)
+        >>> # print(output3)
+        >>> # [0 0 1 0 0 0 0 0 0 0]
+        >>> print(len(output3))
+        10
+        >>> # case 3: The output is random, and the length of the output is the same as num_sample.
+        >>> # replacement is True.
+        >>> # rank is 2
+        >>> input4 = Tensor([[90, 10, 0], [10, 90, 0]], mstype.float32)
+        >>> output4 = ops.multinomial_ext(input4, 10, replacement=True)
+        >>> # print(output4)
+        >>> # [[0 0 0 0 0 0 0 0 1 0]
+        >>> #  [1 1 1 1 1 0 1 1 1 1]]
+    """
+    if generator is None:
+        generator = default_generator
+    seed, offset = generator._step( # pylint: disable=protected-access
+        generator_step_)
+    return multinomial_ext_(input, num_samples, replacement, seed, offset)
+
+
 def _check_shape(input_shape):
     """Check 'shape' value."""
     if not isinstance(input_shape, tuple):
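Unlike the legacy multinomial above, multinomial_ext takes no seed parameter at all; the generator keyword is its only seeding mechanism. A sketch of pinning it (assuming the public mindspore.Generator API):

    import mindspore as ms
    from mindspore import Tensor, ops

    gen = ms.Generator()
    gen.manual_seed(7)

    weights = Tensor([0.1, 0.3, 0.6], ms.float32)
    # Ten draws with replacement; re-seeding `gen` with manual_seed(7) would
    # replay the same (seed, offset) sequence on the next call.
    samples = ops.multinomial_ext(weights, 10, replacement=True, generator=gen)
    print(samples.shape)  # (10,)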
@@ -35,6 +35,8 @@ def reshard(tensor, layout):
           "sharding_propagation".
         - In the semi-auto parallel mode, the parallel mode will automatically switch to auto
           parallel mode with the search mode be set to "sharding_propagation".
+        - Currently, configuring multi-dimension and multi-copy reshard strategy in
+          mindspore.Layout is not supported.
 
     Args:
         tensor (Tensor): The tensor to be set the sharding strategy.
@@ -55,18 +57,18 @@ def reshard(tensor, layout):
         >>> from mindspore import ops, nn, Tensor, context, Layout
         >>> context.set_context(mode=ms.GRAPH_MODE)
         >>> context.set_auto_parallel_context(parallel_mode=ms.ParallelMode.AUTO_PARALLEL,
-        >>>                                   search_mode="sharding_propagation")
+        ...                                   search_mode="sharding_propagation")
         >>> class Network(nn.Cell):
-        >>>     def __init__(self):
-        >>>         super().__init__()
-        >>>         self.matmul = ops.MatMul()
-        >>>         self.relu = ops.ReLU()
-        >>>     def construct(self, x, layout):
-        >>>         x = self.relu(x)
-        >>>         x_reshard = ops.reshard(x, self.layout)
-        >>>         y = Tensor(np.ones(shape=(128, 128)), dtype=ms.float32)
-        >>>         x = self.matmul(x_reshard, y)
-        >>>         return x
+        ...     def __init__(self):
+        ...         super().__init__()
+        ...         self.matmul = ops.MatMul()
+        ...         self.relu = ops.ReLU()
+        ...     def construct(self, x, layout):
+        ...         x = self.relu(x)
+        ...         x_reshard = ops.reshard(x, layout)
+        ...         y = Tensor(np.ones(shape=(128, 128)), dtype=ms.float32)
+        ...         x = self.matmul(x_reshard, y)
+        ...         return x
         >>>
         >>> layout = Layout((4, 2), ("dp", "mp"))
         >>> input_layout = layout("dp", "mp")
@@ -375,7 +375,7 @@ def coo_relu(x: COOTensor) -> COOTensor:
     Args:
         x (COOTensor): Input COOTensor with shape :math:`(N, *)`, where :math:`*`
             means any number of additional dimensions. Its dtype is
-            `number <https://www.mindspore.cn/docs/en/master/api_python/mindspore.html#mindspore.dtype>`_.
+            `number <https://www.mindspore.cn/docs/en/master/api_python/mindspore/mindspore.dtype.html>`_.
 
     Returns:
         COOTensor, has the same shape and dtype as the `x`.
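For context on the docstring whose link this hunk fixes: coo_relu applies ReLU to the stored values of a COOTensor. A minimal sketch (the two stored entries below are hypothetical sample data):

    import mindspore as ms
    from mindspore import Tensor, COOTensor, ops

    indices = Tensor([[0, 1], [1, 2]], ms.int64)   # positions of stored entries
    values = Tensor([-1.0, 2.0], ms.float32)
    x = COOTensor(indices, values, (3, 4))

    out = ops.coo_relu(x)   # negative stored values clamp to 0
    print(out.values)       # [0. 2.]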