mindspore-2.6.0-cp311-cp311-win_amd64.whl → mindspore-2.7.0-cp311-cp311-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of mindspore might be problematic.

Files changed (455)
  1. mindspore/.commit_id +1 -1
  2. mindspore/Microsoft.VisualStudio.Telemetry.dll +0 -0
  3. mindspore/Newtonsoft.Json.dll +0 -0
  4. mindspore/__init__.py +2 -2
  5. mindspore/_c_dataengine.cp311-win_amd64.pyd +0 -0
  6. mindspore/_c_expression.cp311-win_amd64.pyd +0 -0
  7. mindspore/_c_mindrecord.cp311-win_amd64.pyd +0 -0
  8. mindspore/_checkparam.py +42 -11
  9. mindspore/_extends/builtin_operations.py +3 -3
  10. mindspore/{_deprecated → _extends/optimize}/__init__.py +9 -3
  11. mindspore/_extends/optimize/cell_utils.py +96 -0
  12. mindspore/_extends/parallel_compile/akg_compiler/custom.py +1109 -0
  13. mindspore/_extends/parallel_compile/akg_compiler/gen_custom_op_files.py +1 -1
  14. mindspore/_extends/parse/__init__.py +3 -3
  15. mindspore/_extends/parse/compile_config.py +44 -22
  16. mindspore/_extends/parse/deprecated/deprecated_tensor_method.py +1 -2
  17. mindspore/_extends/parse/parser.py +64 -83
  18. mindspore/_extends/parse/resources.py +39 -0
  19. mindspore/_extends/parse/standard_method.py +47 -14
  20. mindspore/_extends/parse/trope.py +8 -1
  21. mindspore/_extends/pijit/__init__.py +1 -2
  22. mindspore/_extends/pijit/pijit_func_white_list.py +2 -5
  23. mindspore/amp.py +4 -22
  24. mindspore/atlprov.dll +0 -0
  25. mindspore/avcodec-59.dll +0 -0
  26. mindspore/avdevice-59.dll +0 -0
  27. mindspore/avfilter-8.dll +0 -0
  28. mindspore/avformat-59.dll +0 -0
  29. mindspore/avutil-57.dll +0 -0
  30. mindspore/boost/adasum.py +1 -1
  31. mindspore/boost/boost_cell_wrapper.py +4 -4
  32. mindspore/c1.dll +0 -0
  33. mindspore/c1xx.dll +0 -0
  34. mindspore/c2.dll +0 -0
  35. mindspore/common/__init__.py +43 -12
  36. mindspore/common/_grad_function.py +2 -1
  37. mindspore/common/_pijit_context.py +28 -7
  38. mindspore/common/_stub_tensor.py +1 -209
  39. mindspore/common/_tensor_cpp_method.py +1 -1
  40. mindspore/common/_tensor_docs.py +177 -52
  41. mindspore/common/_utils.py +9 -1
  42. mindspore/common/api.py +338 -208
  43. mindspore/common/dtype.py +108 -57
  44. mindspore/common/dump.py +11 -16
  45. mindspore/common/dynamic_shape/__init__.py +0 -0
  46. mindspore/common/{auto_dynamic_shape.py → dynamic_shape/auto_dynamic_shape.py} +17 -23
  47. mindspore/common/dynamic_shape/enable_dynamic.py +197 -0
  48. mindspore/common/file_system.py +59 -9
  49. mindspore/common/generator.py +2 -3
  50. mindspore/common/hook_handle.py +33 -5
  51. mindspore/common/jit_config.py +1 -1
  52. mindspore/common/jit_trace.py +84 -105
  53. mindspore/common/np_dtype.py +3 -3
  54. mindspore/common/parameter.py +27 -29
  55. mindspore/common/recompute.py +5 -7
  56. mindspore/common/sparse_tensor.py +0 -3
  57. mindspore/common/symbol.py +0 -1
  58. mindspore/common/tensor.py +84 -133
  59. mindspore/communication/_comm_helper.py +46 -4
  60. mindspore/communication/management.py +79 -7
  61. mindspore/context.py +47 -38
  62. mindspore/dataset/__init__.py +1 -1
  63. mindspore/dataset/audio/transforms.py +1 -1
  64. mindspore/dataset/core/config.py +38 -4
  65. mindspore/dataset/engine/datasets.py +350 -322
  66. mindspore/dataset/engine/datasets_user_defined.py +69 -23
  67. mindspore/dataset/engine/iterators.py +2 -2
  68. mindspore/dataset/engine/obs/config_loader.py +2 -2
  69. mindspore/dataset/engine/obs/obs_mindrecord_dataset.py +8 -0
  70. mindspore/dataset/transforms/c_transforms.py +2 -2
  71. mindspore/dataset/transforms/py_transforms.py +7 -3
  72. mindspore/dataset/transforms/transforms.py +10 -6
  73. mindspore/dataset/vision/__init__.py +1 -1
  74. mindspore/dataset/vision/py_transforms.py +8 -8
  75. mindspore/dataset/vision/transforms.py +17 -5
  76. mindspore/dataset/vision/utils.py +632 -21
  77. mindspore/dataset/vision/validators.py +1 -0
  78. mindspore/device_context/ascend/device.py +1 -1
  79. mindspore/device_context/ascend/op_tuning.py +35 -1
  80. mindspore/device_context/gpu/__init__.py +2 -2
  81. mindspore/device_context/gpu/device.py +1 -1
  82. mindspore/device_context/gpu/op_precision.py +4 -2
  83. mindspore/device_context/gpu/op_tuning.py +6 -3
  84. mindspore/device_manager.py +16 -9
  85. mindspore/dnnl.dll +0 -0
  86. mindspore/dpcmi.dll +0 -0
  87. mindspore/experimental/llm_boost/ascend_native/llama_boost_ascend_native.py +5 -4
  88. mindspore/experimental/llm_boost/atb/boost_base.py +2 -3
  89. mindspore/experimental/optim/adadelta.py +13 -20
  90. mindspore/experimental/optim/adagrad.py +15 -22
  91. mindspore/experimental/optim/adam.py +17 -24
  92. mindspore/experimental/optim/adamax.py +14 -22
  93. mindspore/experimental/optim/adamw.py +28 -34
  94. mindspore/experimental/optim/asgd.py +15 -25
  95. mindspore/experimental/optim/lr_scheduler.py +27 -45
  96. mindspore/experimental/optim/nadam.py +14 -24
  97. mindspore/experimental/optim/optimizer.py +13 -23
  98. mindspore/experimental/optim/radam.py +18 -24
  99. mindspore/experimental/optim/rmsprop.py +14 -25
  100. mindspore/experimental/optim/rprop.py +15 -26
  101. mindspore/experimental/optim/sgd.py +9 -19
  102. mindspore/hal/__init__.py +4 -4
  103. mindspore/hal/contiguous_tensors_handle.py +2 -2
  104. mindspore/hal/memory.py +1 -0
  105. mindspore/include/api/cell.h +65 -5
  106. mindspore/include/api/cfg.h +24 -7
  107. mindspore/include/api/context.h +1 -0
  108. mindspore/include/api/delegate.h +10 -2
  109. mindspore/include/api/dual_abi_helper.h +100 -19
  110. mindspore/include/api/graph.h +14 -1
  111. mindspore/include/api/kernel.h +16 -3
  112. mindspore/include/api/kernel_api.h +9 -1
  113. mindspore/include/api/metrics/accuracy.h +9 -0
  114. mindspore/include/api/model.h +8 -1
  115. mindspore/include/api/model_group.h +4 -0
  116. mindspore/include/api/model_parallel_runner.h +2 -0
  117. mindspore/include/api/status.h +48 -10
  118. mindspore/include/api/types.h +8 -3
  119. mindspore/include/c_api/model_c.h +0 -58
  120. mindspore/include/c_api/tensor_c.h +0 -26
  121. mindspore/include/dataset/constants.h +9 -0
  122. mindspore/include/dataset/vision_ascend.h +1 -1
  123. mindspore/jpeg62.dll +0 -0
  124. mindspore/mindrecord/tools/cifar10.py +61 -11
  125. mindspore/mindrecord/tools/cifar10_to_mr.py +5 -0
  126. mindspore/mindspore_backend_common.dll +0 -0
  127. mindspore/mindspore_backend_manager.dll +0 -0
  128. mindspore/mindspore_common.dll +0 -0
  129. mindspore/mindspore_core.dll +0 -0
  130. mindspore/mindspore_cpu_res_manager.dll +0 -0
  131. mindspore/mindspore_dump.dll +0 -0
  132. mindspore/mindspore_frontend.dll +0 -0
  133. mindspore/mindspore_glog.dll +0 -0
  134. mindspore/mindspore_memory_pool.dll +0 -0
  135. mindspore/mindspore_ms_backend.dll +0 -0
  136. mindspore/mindspore_ops.dll +0 -0
  137. mindspore/mindspore_ops_host.dll +0 -0
  138. mindspore/mindspore_ops_kernel_common.dll +0 -0
  139. mindspore/mindspore_profiler.dll +0 -0
  140. mindspore/mindspore_pyboost.dll +0 -0
  141. mindspore/mindspore_pynative.dll +0 -0
  142. mindspore/mindspore_res_manager.dll +0 -0
  143. mindspore/mindspore_runtime_pipeline.dll +0 -0
  144. mindspore/mint/__init__.py +4 -44
  145. mindspore/mint/distributed/__init__.py +5 -0
  146. mindspore/mint/distributed/distributed.py +425 -19
  147. mindspore/mint/nn/__init__.py +1 -1
  148. mindspore/mint/nn/functional.py +53 -6
  149. mindspore/mint/nn/layer/_functions.py +163 -294
  150. mindspore/mint/nn/layer/activation.py +8 -6
  151. mindspore/mint/nn/layer/conv.py +125 -101
  152. mindspore/mint/nn/layer/normalization.py +11 -25
  153. mindspore/mint/optim/adam.py +19 -18
  154. mindspore/mint/optim/adamw.py +14 -8
  155. mindspore/mint/optim/sgd.py +5 -5
  156. mindspore/msobj140.dll +0 -0
  157. mindspore/mspdb140.dll +0 -0
  158. mindspore/mspdbcore.dll +0 -0
  159. mindspore/mspdbst.dll +0 -0
  160. mindspore/mspft140.dll +0 -0
  161. mindspore/msvcdis140.dll +0 -0
  162. mindspore/msvcp140_1.dll +0 -0
  163. mindspore/msvcp140_2.dll +0 -0
  164. mindspore/msvcp140_atomic_wait.dll +0 -0
  165. mindspore/msvcp140_codecvt_ids.dll +0 -0
  166. mindspore/nn/cell.py +488 -620
  167. mindspore/nn/grad/cell_grad.py +11 -12
  168. mindspore/nn/layer/activation.py +36 -36
  169. mindspore/nn/layer/basic.py +74 -77
  170. mindspore/nn/layer/channel_shuffle.py +4 -4
  171. mindspore/nn/layer/combined.py +4 -2
  172. mindspore/nn/layer/conv.py +86 -85
  173. mindspore/nn/layer/dense.py +9 -7
  174. mindspore/nn/layer/embedding.py +50 -52
  175. mindspore/nn/layer/image.py +38 -40
  176. mindspore/nn/layer/math.py +111 -112
  177. mindspore/nn/layer/normalization.py +56 -44
  178. mindspore/nn/layer/pooling.py +58 -63
  179. mindspore/nn/layer/rnn_cells.py +33 -33
  180. mindspore/nn/layer/rnns.py +56 -56
  181. mindspore/nn/layer/thor_layer.py +74 -73
  182. mindspore/nn/layer/transformer.py +11 -1
  183. mindspore/nn/learning_rate_schedule.py +20 -20
  184. mindspore/nn/loss/loss.py +79 -81
  185. mindspore/nn/optim/adam.py +2 -4
  186. mindspore/nn/optim/adasum.py +2 -2
  187. mindspore/nn/optim/lamb.py +1 -3
  188. mindspore/nn/optim/optimizer.py +1 -1
  189. mindspore/nn/optim/tft_wrapper.py +2 -3
  190. mindspore/nn/optim/thor.py +2 -2
  191. mindspore/nn/probability/distribution/_utils/utils.py +2 -2
  192. mindspore/nn/probability/distribution/exponential.py +2 -1
  193. mindspore/nn/probability/distribution/poisson.py +2 -1
  194. mindspore/nn/sparse/sparse.py +3 -3
  195. mindspore/nn/wrap/cell_wrapper.py +73 -42
  196. mindspore/nn/wrap/grad_reducer.py +37 -52
  197. mindspore/nn/wrap/loss_scale.py +72 -74
  198. mindspore/numpy/array_creations.py +7 -7
  199. mindspore/numpy/fft.py +1 -1
  200. mindspore/numpy/math_ops.py +1 -1
  201. mindspore/numpy/utils_const.py +1 -1
  202. mindspore/opencv_core452.dll +0 -0
  203. mindspore/opencv_imgcodecs452.dll +0 -0
  204. mindspore/opencv_imgproc452.dll +0 -0
  205. mindspore/ops/_grad_experimental/grad_comm_ops.py +51 -13
  206. mindspore/ops/_grad_experimental/grad_debug_ops.py +14 -0
  207. mindspore/ops/_grad_experimental/grad_inner_ops.py +0 -9
  208. mindspore/ops/_op_impl/cpu/__init__.py +1 -0
  209. mindspore/{experimental/es/__init__.py → ops/_op_impl/cpu/joinedstr_op.py} +12 -6
  210. mindspore/ops/_vmap/vmap_array_ops.py +6 -13
  211. mindspore/ops/_vmap/vmap_nn_ops.py +8 -16
  212. mindspore/ops/auto_generate/cpp_create_prim_instance_helper.py +29 -10
  213. mindspore/ops/auto_generate/gen_extend_func.py +5 -55
  214. mindspore/ops/auto_generate/gen_ops_def.py +753 -273
  215. mindspore/ops/auto_generate/gen_ops_prim.py +1687 -958
  216. mindspore/ops/auto_generate/pyboost_inner_prim.py +31 -1
  217. mindspore/ops/composite/__init__.py +10 -0
  218. mindspore/ops/composite/base.py +9 -5
  219. mindspore/ops/composite/multitype_ops/__init__.py +12 -1
  220. mindspore/ops/composite/multitype_ops/_compile_utils.py +132 -108
  221. mindspore/ops/composite/multitype_ops/_constexpr_utils.py +1 -1
  222. mindspore/ops/composite/multitype_ops/add_impl.py +70 -2
  223. mindspore/ops/composite/multitype_ops/div_impl.py +49 -0
  224. mindspore/ops/composite/multitype_ops/floordiv_impl.py +29 -0
  225. mindspore/ops/composite/multitype_ops/getitem_impl.py +11 -0
  226. mindspore/ops/composite/multitype_ops/mod_impl.py +5 -3
  227. mindspore/ops/composite/multitype_ops/mul_impl.py +49 -0
  228. mindspore/ops/composite/multitype_ops/setitem_impl.py +57 -0
  229. mindspore/ops/composite/multitype_ops/sub_impl.py +34 -0
  230. mindspore/ops/composite/multitype_ops/zeros_like_impl.py +14 -0
  231. mindspore/ops/function/__init__.py +4 -1
  232. mindspore/ops/function/_add_attr_func.py +11 -6
  233. mindspore/ops/function/array_func.py +17 -100
  234. mindspore/ops/function/debug_func.py +8 -5
  235. mindspore/ops/function/grad/grad_func.py +5 -13
  236. mindspore/ops/function/math_func.py +65 -399
  237. mindspore/ops/function/nn_func.py +44 -61
  238. mindspore/ops/function/other_func.py +4 -1
  239. mindspore/ops/function/random_func.py +31 -4
  240. mindspore/ops/functional.py +2 -3
  241. mindspore/ops/functional_overload.py +486 -18
  242. mindspore/ops/op_info_register.py +21 -0
  243. mindspore/ops/operations/__init__.py +5 -2
  244. mindspore/ops/operations/_custom_ops_utils.py +675 -8
  245. mindspore/ops/operations/_inner_ops.py +14 -18
  246. mindspore/ops/operations/_sequence_ops.py +1 -1
  247. mindspore/ops/operations/array_ops.py +4 -50
  248. mindspore/ops/operations/comm_ops.py +186 -41
  249. mindspore/ops/operations/custom_ops.py +244 -175
  250. mindspore/ops/operations/debug_ops.py +55 -4
  251. mindspore/ops/operations/image_ops.py +13 -13
  252. mindspore/ops/operations/manually_defined/ops_def.py +27 -28
  253. mindspore/ops/operations/math_ops.py +8 -9
  254. mindspore/ops/operations/nn_ops.py +6 -7
  255. mindspore/ops/primitive.py +9 -20
  256. mindspore/ops/tensor_method.py +52 -11
  257. mindspore/ops_generate/api/cpp_create_prim_instance_helper_generator.py +1 -1
  258. mindspore/ops_generate/api/functional_map_cpp_generator.py +10 -9
  259. mindspore/ops_generate/api/functions_cc_generator.py +58 -10
  260. mindspore/ops_generate/api/tensor_func_reg_cpp_generator.py +1 -1
  261. mindspore/ops_generate/common/base_generator.py +14 -0
  262. mindspore/ops_generate/common/gen_constants.py +7 -2
  263. mindspore/ops_generate/common/gen_utils.py +0 -19
  264. mindspore/ops_generate/common/op_proto.py +11 -4
  265. mindspore/ops_generate/common/template.py +88 -11
  266. mindspore/ops_generate/gen_ops.py +1 -1
  267. mindspore/ops_generate/op_def/lite_ops_cpp_generator.py +4 -4
  268. mindspore/ops_generate/op_def/ops_name_h_generator.py +0 -3
  269. mindspore/ops_generate/op_def/ops_primitive_h_generator.py +0 -4
  270. mindspore/ops_generate/op_def_py/op_prim_py_generator.py +5 -2
  271. mindspore/ops_generate/pyboost/auto_grad_impl_cc_generator.py +49 -8
  272. mindspore/ops_generate/pyboost/auto_grad_reg_cc_generator.py +2 -2
  273. mindspore/ops_generate/pyboost/gen_pyboost_func.py +31 -16
  274. mindspore/ops_generate/pyboost/op_template_parser.py +98 -72
  275. mindspore/ops_generate/pyboost/pyboost_functions_cpp_generator.py +70 -273
  276. mindspore/ops_generate/pyboost/pyboost_functions_h_generator.py +14 -6
  277. mindspore/ops_generate/pyboost/pyboost_functions_impl_cpp_generator.py +316 -0
  278. mindspore/ops_generate/pyboost/pyboost_functions_py_generator.py +1 -1
  279. mindspore/ops_generate/pyboost/pyboost_grad_function_cpp_generator.py +5 -3
  280. mindspore/ops_generate/pyboost/pyboost_inner_prim_generator.py +1 -1
  281. mindspore/ops_generate/pyboost/pyboost_internal_functions_cpp_generator.py +76 -0
  282. mindspore/ops_generate/pyboost/pyboost_internal_functions_h_generator.py +76 -0
  283. mindspore/ops_generate/pyboost/pyboost_internal_kernel_info_adapter_generator.py +125 -0
  284. mindspore/ops_generate/pyboost/pyboost_native_grad_functions_generator.py +4 -3
  285. mindspore/ops_generate/pyboost/pyboost_op_cpp_code_generator.py +348 -61
  286. mindspore/ops_generate/pyboost/pyboost_overload_functions_cpp_generator.py +1 -1
  287. mindspore/ops_generate/pyboost/pyboost_utils.py +118 -9
  288. mindspore/ops_generate/tensor_py_cc_generator.py +1 -24
  289. mindspore/parallel/_auto_parallel_context.py +9 -17
  290. mindspore/parallel/_cell_wrapper.py +106 -40
  291. mindspore/parallel/_parallel_serialization.py +4 -3
  292. mindspore/parallel/_ps_context.py +4 -6
  293. mindspore/parallel/_tensor.py +167 -12
  294. mindspore/parallel/_transformer/moe.py +1 -1
  295. mindspore/parallel/_transformer/transformer.py +17 -12
  296. mindspore/parallel/_utils.py +5 -11
  297. mindspore/parallel/auto_parallel.py +33 -12
  298. mindspore/parallel/checkpoint_convert.py +3 -3
  299. mindspore/parallel/checkpoint_transform.py +5 -1
  300. mindspore/parallel/cluster/process_entity/_api.py +88 -49
  301. mindspore/parallel/cluster/process_entity/_utils.py +95 -7
  302. mindspore/parallel/cluster/run.py +48 -7
  303. mindspore/parallel/function/__init__.py +8 -1
  304. mindspore/parallel/function/reshard_func.py +7 -6
  305. mindspore/parallel/nn/__init__.py +15 -2
  306. mindspore/parallel/nn/parallel_cell_wrapper.py +50 -14
  307. mindspore/parallel/nn/parallel_grad_reducer.py +7 -14
  308. mindspore/parallel/shard.py +9 -23
  309. mindspore/parallel/transform_safetensors.py +468 -174
  310. mindspore/pgodb140.dll +0 -0
  311. mindspore/pgort140.dll +0 -0
  312. mindspore/profiler/__init__.py +2 -1
  313. mindspore/profiler/analysis/parser/timeline_assembly_factory/ascend_timeline_assembler.py +7 -7
  314. mindspore/profiler/analysis/parser/timeline_assembly_factory/base_timeline_assembler.py +3 -0
  315. mindspore/profiler/analysis/parser/timeline_assembly_factory/trace_view_container.py +3 -0
  316. mindspore/profiler/analysis/parser/timeline_creator/cpu_op_timeline_creator.py +3 -3
  317. mindspore/profiler/analysis/parser/timeline_creator/fwk_timeline_creator.py +3 -3
  318. mindspore/profiler/analysis/parser/timeline_creator/msprof_timeline_creator.py +4 -4
  319. mindspore/profiler/analysis/parser/timeline_creator/scope_layer_timeline_creator.py +3 -3
  320. mindspore/profiler/analysis/parser/timeline_event/fwk_event.py +4 -1
  321. mindspore/profiler/analysis/parser/timeline_event/timeline_event_pool.py +2 -1
  322. mindspore/profiler/analysis/task_manager.py +1 -1
  323. mindspore/profiler/analysis/viewer/ascend_communication_viewer.py +5 -1
  324. mindspore/profiler/analysis/viewer/ascend_integrate_viewer.py +2 -1
  325. mindspore/profiler/analysis/viewer/ascend_kernel_details_viewer.py +10 -9
  326. mindspore/profiler/analysis/viewer/ascend_op_memory_viewer.py +43 -23
  327. mindspore/profiler/analysis/viewer/ascend_step_trace_time_viewer.py +3 -2
  328. mindspore/profiler/analysis/viewer/ms_minddata_viewer.py +9 -5
  329. mindspore/profiler/analysis/viewer/ms_operator_details_viewer.py +132 -0
  330. mindspore/profiler/common/constant.py +16 -0
  331. mindspore/profiler/common/msprof_cmd_tool.py +2 -2
  332. mindspore/profiler/common/path_manager.py +9 -0
  333. mindspore/profiler/common/profiler_context.py +50 -29
  334. mindspore/profiler/common/profiler_info.py +0 -16
  335. mindspore/profiler/common/profiler_meta_data.py +1 -0
  336. mindspore/profiler/common/profiler_op_analyse.py +239 -0
  337. mindspore/profiler/common/profiler_output_path.py +23 -8
  338. mindspore/profiler/common/profiler_parameters.py +128 -35
  339. mindspore/profiler/dynamic_profile/__init__.py +0 -0
  340. mindspore/profiler/dynamic_profile/dynamic_monitor_proxy.py +39 -0
  341. mindspore/profiler/dynamic_profile/dynamic_profiler_config_context.py +666 -0
  342. mindspore/profiler/dynamic_profile/dynamic_profiler_utils.py +62 -0
  343. mindspore/profiler/dynamic_profiler.py +374 -338
  344. mindspore/profiler/envprofiler.py +42 -12
  345. mindspore/profiler/experimental_config.py +112 -7
  346. mindspore/profiler/mstx.py +33 -12
  347. mindspore/profiler/platform/__init__.py +2 -3
  348. mindspore/profiler/platform/cpu_profiler.py +10 -4
  349. mindspore/profiler/platform/npu_profiler.py +30 -20
  350. mindspore/profiler/profiler.py +218 -154
  351. mindspore/profiler/profiler_action_controller.py +65 -77
  352. mindspore/profiler/profiler_interface.py +2 -2
  353. mindspore/profiler/schedule.py +10 -4
  354. mindspore/rewrite/common/config.py +1 -0
  355. mindspore/rewrite/common/namer.py +1 -0
  356. mindspore/rewrite/common/namespace.py +1 -0
  357. mindspore/rewrite/node/node.py +31 -11
  358. mindspore/rewrite/parsers/assign_parser.py +1 -1
  359. mindspore/rewrite/symbol_tree/symbol_tree.py +2 -2
  360. mindspore/run_check/_check_version.py +7 -10
  361. mindspore/runtime/__init__.py +8 -6
  362. mindspore/runtime/event.py +10 -4
  363. mindspore/runtime/executor.py +87 -45
  364. mindspore/runtime/memory.py +22 -30
  365. mindspore/runtime/thread_bind_core.py +299 -165
  366. mindspore/safeguard/rewrite_obfuscation.py +12 -13
  367. mindspore/swresample-4.dll +0 -0
  368. mindspore/swscale-6.dll +0 -0
  369. mindspore/tbbmalloc.dll +0 -0
  370. mindspore/tinyxml2.dll +0 -0
  371. mindspore/train/_utils.py +9 -5
  372. mindspore/train/amp.py +43 -23
  373. mindspore/train/callback/__init__.py +5 -5
  374. mindspore/train/callback/_callback.py +2 -1
  375. mindspore/train/callback/_checkpoint.py +4 -14
  376. mindspore/train/callback/_flops_collector.py +11 -7
  377. mindspore/train/callback/_landscape.py +0 -1
  378. mindspore/train/callback/_train_fault_tolerance.py +72 -18
  379. mindspore/train/data_sink.py +15 -6
  380. mindspore/train/dataset_helper.py +14 -5
  381. mindspore/train/model.py +49 -47
  382. mindspore/train/serialization.py +168 -126
  383. mindspore/train/summary/summary_record.py +13 -2
  384. mindspore/train/train_thor/model_thor.py +2 -2
  385. mindspore/turbojpeg.dll +0 -0
  386. mindspore/utils/__init__.py +3 -2
  387. mindspore/utils/dryrun.py +0 -6
  388. mindspore/utils/runtime_execution_order_check.py +162 -78
  389. mindspore/utils/sdc_detect.py +68 -0
  390. mindspore/utils/utils.py +14 -17
  391. mindspore/vcmeta.dll +0 -0
  392. mindspore/vcruntime140.dll +0 -0
  393. mindspore/vcruntime140_1.dll +0 -0
  394. mindspore/version.py +1 -1
  395. {mindspore-2.6.0.dist-info → mindspore-2.7.0.dist-info}/METADATA +5 -4
  396. {mindspore-2.6.0.dist-info → mindspore-2.7.0.dist-info}/RECORD +400 -439
  397. mindspore/_deprecated/jit.py +0 -198
  398. mindspore/_extends/remote/kernel_build_server_ascend.py +0 -75
  399. mindspore/communication/_hccl_management.py +0 -297
  400. mindspore/experimental/es/embedding_service.py +0 -891
  401. mindspore/experimental/es/embedding_service_layer.py +0 -581
  402. mindspore/profiler/common/validator/__init__.py +0 -14
  403. mindspore/profiler/common/validator/validate_path.py +0 -84
  404. mindspore/profiler/parser/__init__.py +0 -14
  405. mindspore/profiler/parser/aicpu_data_parser.py +0 -272
  406. mindspore/profiler/parser/ascend_analysis/__init__.py +0 -14
  407. mindspore/profiler/parser/ascend_analysis/constant.py +0 -71
  408. mindspore/profiler/parser/ascend_analysis/file_manager.py +0 -180
  409. mindspore/profiler/parser/ascend_analysis/function_event.py +0 -185
  410. mindspore/profiler/parser/ascend_analysis/fwk_cann_parser.py +0 -136
  411. mindspore/profiler/parser/ascend_analysis/fwk_file_parser.py +0 -131
  412. mindspore/profiler/parser/ascend_analysis/msprof_timeline_parser.py +0 -104
  413. mindspore/profiler/parser/ascend_analysis/path_manager.py +0 -313
  414. mindspore/profiler/parser/ascend_analysis/profiler_info_parser.py +0 -123
  415. mindspore/profiler/parser/ascend_analysis/tlv_decoder.py +0 -86
  416. mindspore/profiler/parser/ascend_analysis/trace_event_manager.py +0 -75
  417. mindspore/profiler/parser/ascend_cluster_generator.py +0 -116
  418. mindspore/profiler/parser/ascend_communicate_generator.py +0 -314
  419. mindspore/profiler/parser/ascend_flops_generator.py +0 -116
  420. mindspore/profiler/parser/ascend_fpbp_generator.py +0 -82
  421. mindspore/profiler/parser/ascend_hccl_generator.py +0 -271
  422. mindspore/profiler/parser/ascend_integrate_generator.py +0 -42
  423. mindspore/profiler/parser/ascend_memory_generator.py +0 -185
  424. mindspore/profiler/parser/ascend_msprof_exporter.py +0 -282
  425. mindspore/profiler/parser/ascend_msprof_generator.py +0 -187
  426. mindspore/profiler/parser/ascend_op_generator.py +0 -334
  427. mindspore/profiler/parser/ascend_steptrace_generator.py +0 -94
  428. mindspore/profiler/parser/ascend_timeline_generator.py +0 -545
  429. mindspore/profiler/parser/base_timeline_generator.py +0 -483
  430. mindspore/profiler/parser/container.py +0 -229
  431. mindspore/profiler/parser/cpu_gpu_timeline_generator.py +0 -697
  432. mindspore/profiler/parser/flops_parser.py +0 -531
  433. mindspore/profiler/parser/framework_enum.py +0 -111
  434. mindspore/profiler/parser/framework_parser.py +0 -464
  435. mindspore/profiler/parser/framework_struct.py +0 -61
  436. mindspore/profiler/parser/gpu_analysis/__init__.py +0 -14
  437. mindspore/profiler/parser/gpu_analysis/function_event.py +0 -44
  438. mindspore/profiler/parser/gpu_analysis/fwk_file_parser.py +0 -89
  439. mindspore/profiler/parser/gpu_analysis/profiler_info_parser.py +0 -72
  440. mindspore/profiler/parser/hccl_parser.py +0 -573
  441. mindspore/profiler/parser/hwts_log_parser.py +0 -122
  442. mindspore/profiler/parser/integrator.py +0 -526
  443. mindspore/profiler/parser/memory_usage_parser.py +0 -277
  444. mindspore/profiler/parser/minddata_analyzer.py +0 -800
  445. mindspore/profiler/parser/minddata_parser.py +0 -186
  446. mindspore/profiler/parser/minddata_pipeline_parser.py +0 -299
  447. mindspore/profiler/parser/op_intermediate_parser.py +0 -149
  448. mindspore/profiler/parser/optime_parser.py +0 -250
  449. mindspore/profiler/parser/profiler_info.py +0 -213
  450. mindspore/profiler/parser/step_trace_parser.py +0 -666
  451. mindspore/utils/hooks.py +0 -81
  452. /mindspore/common/{_auto_dynamic.py → dynamic_shape/_auto_dynamic.py} +0 -0
  453. {mindspore-2.6.0.dist-info → mindspore-2.7.0.dist-info}/WHEEL +0 -0
  454. {mindspore-2.6.0.dist-info → mindspore-2.7.0.dist-info}/entry_points.txt +0 -0
  455. {mindspore-2.6.0.dist-info → mindspore-2.7.0.dist-info}/top_level.txt +0 -0
mindspore/communication/management.py CHANGED
@@ -21,7 +21,7 @@ from mindspore.communication._comm_helper import Backend, _get_rank_helper, _get
     _get_world_rank_from_group_rank_helper, _get_group_rank_from_world_rank_helper, \
     _create_group_helper, _destroy_group_helper, HCCL_WORLD_COMM_GROUP, NCCL_WORLD_COMM_GROUP, \
     MCCL_WORLD_COMM_GROUP, DEVICE_TO_BACKEND, _get_local_rank_helper, _get_local_size_helper, GlobalComm, \
-    _check_mpi_envs, _set_elegant_exit_handle, _get_group_ranks, _get_comm_name_helper
+    _check_mpi_envs, _set_elegant_exit_handle, _get_group_ranks, _get_comm_name_helper, _comm_switch_nic_helper
 from mindspore._c_expression import init_hccl, finalize_hccl, init_cluster, MSContext, ms_ctx_param
 from mindspore.hal.device import is_initialized
 
@@ -116,7 +116,7 @@ def _check_hccl():
     except Exception as e:
         logger.error(f"Check hccl failed: {e}")
         raise RuntimeError("\"hccl\" wheel was not installed correctly. For details, refer to the installation "
-                           "guidelines: https://www.mindspore.cn/install")
+                           "guidelines: https://www.mindspore.cn/install") from e
 
 
 def init(backend_name=None):
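For readers unfamiliar with the `raise ... from e` form added above: it chains the new RuntimeError to the original exception, so the root cause stays visible in the traceback. A minimal, self-contained illustration (the names below are hypothetical and unrelated to MindSpore internals; only the chaining behavior mirrors the change):

# Minimal illustration of exception chaining with "raise ... from e".
def check_dependency():
    try:
        import does_not_exist_pkg  # stand-in for the hccl check failing
    except ImportError as e:
        raise RuntimeError("dependency was not installed correctly") from e

try:
    check_dependency()
except RuntimeError as err:
    print(err)            # the RuntimeError message
    print(err.__cause__)  # the original ImportError is preserved as the cause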
@@ -545,7 +545,7 @@ def get_group_rank_from_world_rank(world_rank_id, group):
     return _get_group_rank_from_world_rank_helper(world_rank_id=world_rank_id, group=group)
 
 
-def create_group(group, rank_ids):
+def create_group(group, rank_ids, options=None):
     """
     Create a user collective communication group.
 
@@ -558,6 +558,18 @@ def create_group(group, rank_ids):
     Args:
         group (str): The name of the communication group to be created.
         rank_ids (list): A list of device IDs.
+        options (GroupOptions, optional): Additional communication group configuration parameters.
+            The backend will automatically select supported parameters and apply them during group
+            initialization, e.g. for the ``HCCL`` backend, ``hccl_config`` can be specified so that
+            group initialization configurations can be applied. Default is ``None``.
+
+            `GroupOptions` is defined as a class that can be instantiated as a python object.
+
+            .. code-block::
+
+                GroupOptions {
+                    hccl_config(dict)
+                }
 
     Raises:
         TypeError: If group is not a string or `rank_ids` is not a list.
@@ -578,22 +590,24 @@ def create_group(group, rank_ids):
         for more details.
 
         >>> import mindspore as ms
-        >>> from mindspore import set_context
-        >>> from mindspore import ops
+        >>> from mindspore import set_context, ops
+        >>> from mindspore._c_expression import GroupOptions
        >>> from mindspore.communication import init, create_group, get_rank
        >>> set_context(mode=ms.GRAPH_MODE)
        >>> ms.set_device(device_target="Ascend")
        >>> init()
        >>> group = "0-7"
        >>> rank_ids = [0,7]
+        >>> options = GroupOptions()
+        >>> options.hccl_config = {"hccl_buffer_size": 400}
        >>> if get_rank() in rank_ids:
-        ...     create_group(group, rank_ids)
+        ...     create_group(group, rank_ids, options)
        ...     allreduce = ops.AllReduce(group)
     """
     if not isinstance(group, str):
         raise TypeError("For 'create_group', the argument 'group' must be type of string, "
                         "but got 'group' type : {}.".format(type(group)))
-    _create_group_helper(group, rank_ids)
+    _create_group_helper(group, rank_ids, options)
 
 
 def destroy_group(group):
@@ -740,3 +754,61 @@ def get_process_group_ranks(group=GlobalComm.WORLD_COMM_GROUP):
 
     """
     return _get_group_ranks(group=_get_group(group))
+
+
+def _comm_switch_nic(global_ranks, use_backup):
+    """Switch the network interface card between the primary and the backup NIC.
+
+    Args:
+        global_ranks (list[int], tuple[int]): list of integers. The global rank ids that need to switch
+            the network interface card.
+        use_backup (list[bool], tuple[bool]): list of bool. For each rank id in global_ranks, determines whether
+            to use the backup network interface card. True means use, False means do not use.
+
+    Returns:
+        bool, whether the network card switch is successful.
+        If any rank fails, return False. If all are successful, return True.
+
+    Supported Platforms:
+        ``Ascend``
+
+    Examples:
+        .. note::
+            Before running the following examples, you need to configure the communication environment variables.
+
+            For Ascend/GPU/CPU devices, it is recommended to use the msrun startup method
+            without any third-party or configuration file dependencies.
+
+            Please see the `msrun start up
+            <https://www.mindspore.cn/tutorials/en/master/parallel/msrun_launcher.html>`_
+            for more details.
+
+            This example should be run with 4 devices.
+
+        >>> from mindspore.communication import init
+        >>> from mindspore.communication.management import _comm_switch_nic
+        >>>
+        >>> init()
+        >>> ret = _comm_switch_nic([0, 1], [True, False])
+        >>> print(ret)
+        True
+
+    """
+    max_rank = get_group_size() - 1
+    if not all(isinstance(i, (list, tuple)) for i in (global_ranks, use_backup)):
+        raise ValueError(f"For _comm_switch_nic, the args 'global_ranks' and 'use_backup' should be list or tuple, "
+                         f"but got 'global_ranks' type {type(global_ranks)}, 'use_backup' type {type(use_backup)}.")
+    if not all(isinstance(rank, int) and not isinstance(rank, bool) and rank <= max_rank for rank in global_ranks):
+        raise ValueError(f"For _comm_switch_nic, all elements in 'global_ranks' should be int and less than "
+                         f"{get_group_size()}, but got 'global_ranks': {global_ranks}.")
+    if not all(isinstance(ub, bool) for ub in use_backup):
+        raise ValueError(f"For _comm_switch_nic, all elements in 'use_backup' should be bool, but got "
+                         f"'use_backup': {use_backup}.")
+    if len(set(global_ranks)) != len(global_ranks):
+        raise ValueError(f"For _comm_switch_nic, all elements in 'global_ranks' should be different, but got "
+                         f"'global_ranks': {global_ranks}.")
+    if len(global_ranks) != len(use_backup):
+        raise ValueError(f"For _comm_switch_nic, the number of elements in 'global_ranks' should be equal to that "
+                         f"in 'use_backup', but got 'global_ranks' with {len(global_ranks)} elements: {global_ranks},"
+                         f" 'use_backup' with {len(use_backup)} elements: {use_backup}.")
+    return _comm_switch_nic_helper(global_ranks, use_backup)
mindspore/context.py CHANGED
@@ -204,13 +204,6 @@ class _Context:
         if mode == PYNATIVE_MODE:
             if self.enable_debug_runtime:
                 self.set_backend_policy("vm")
-            parallel_mode = _get_auto_parallel_context("parallel_mode")
-            if parallel_mode not in (ParallelMode.DATA_PARALLEL, ParallelMode.STAND_ALONE, ParallelMode.AUTO_PARALLEL):
-                raise ValueError(f"Got {parallel_mode}, when the user enabled SEMI_AUTO_PARALELL, "
-                                 f"pynative mode dose not support, you should set either "
-                                 f"context.set_auto_parallel_context(parallel_mode='data_parallel'), "
-                                 f"context.set_auto_parallel_context(parallel_mode='stand_alone') "
-                                 f"or context.set_auto_parallel_context(parallel_mode='auto_parallel').")
             self._context_switches.push(True, None)
         elif mode == GRAPH_MODE:
             if self.enable_debug_runtime:
@@ -290,6 +283,8 @@
             raise ValueError(f"For 'context.set_context', the argument 'deterministic' must be one of "
                              f"{deterministic_options}, but got {deterministic}.")
 
+        logger.info(f"Set deterministic setting to '{deterministic}'.")
+
         # Must wait for all async created groups to be initialized so that
         # deterministic feature could be consistent between all processes.
         CollectiveManager.get_instance().wait_all_comm_init()
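The new INFO log makes the chosen deterministic setting visible in the runtime log. For reference, a minimal sketch of how the setting is applied (the documented values are "ON" and "OFF"):

# Minimal sketch: enabling deterministic computation; with the change above,
# the selected value now also appears in the INFO log.
import mindspore as ms

ms.set_context(deterministic="ON")   # accepted values are "ON" and "OFF"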
@@ -603,12 +598,12 @@
     def set_mempool_block_size(self, mempool_block_size):
         """Set the block size of memory pool."""
         global_jit_config = get_jit_config()
-        is_force_kbk = False
+        is_ge = False
         if global_jit_config:
-            is_force_kbk = global_jit_config.get('jit_level') == "O0" or global_jit_config.get('jit_level') == "O1"
-        if _get_mode() == GRAPH_MODE and not is_force_kbk:
-            logger.warning("Graph mode doesn't support to set parameter 'mempool_block_size' of context currently, "
-                           "you can use context.set_context to set pynative mode or set jit_level=O0/O1.")
+            is_ge = global_jit_config.get('backend') == "GE" or global_jit_config.get('jit_level') == "O2"
+        if is_ge:
+            logger.warning("GE doesn't support to set parameter 'mempool_block_size' of context currently, "
+                           "you can use pynative mode or set jit_level=O0/O1.")
             return
         if not Validator.check_str_by_regular(mempool_block_size, _RE_PATTERN):
             raise ValueError("For 'context.set_context', the argument 'mempool_block_size' should be in "
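With the new check, the warning only fires for the GE backend (backend "GE" or jit_level O2); in PyNative mode or with jit_level O0/O1 the value is applied after the format check. A minimal sketch of the accepted usage (the size string is illustrative but must match the documented "<N>GB" pattern):

# Minimal sketch: configuring the memory pool block size.
# Per the change above, this is honored outside the GE backend; under GE / jit_level O2
# it is ignored with a warning.
import mindspore as ms

ms.set_context(mode=ms.PYNATIVE_MODE)
ms.set_context(mempool_block_size="1GB")  # must follow the "<N>GB" format checked above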
@@ -838,6 +833,26 @@
             raise TypeError(f"For step num, the value type should be int, but got {type(step)}, {step}")
         self.set_param(ms_ctx_param.last_triggered_step, step)
 
+    @staticmethod
+    def _check_speedup_config_str_value(key, value):
+        """check speedup config str value"""
+        if key in ["pp_1f1b_overlap", "recompute_comm_overlap", "recomputation_communication_overlap",
+                   "matmul_grad_comm_overlap", "grad_matmul_communication_overlap"]:
+            if isinstance(value, str):
+                values = value.split(",")
+                for v in values:
+                    if v not in ['AlltoAll', 'AlltoAllV', 'MorphAllGather', 'AllReduce',
+                                 'AllGather', 'ReduceScatter', 'MorphReduceScatter', '']:
+                        raise ValueError("{} 's value should be subset of ['AlltoAll', 'AlltoAllV',"
+                                         " 'MorphAllGather', 'AllGather', 'ReduceScatter',"
+                                         " 'MorphReduceScatter', 'AllReduce'].".format(key))
+                return value
+            if value:
+                return "AlltoAll,AlltoAllV,AllGather,ReduceScatter,AllReduce"
+            return ""
+
+        return value
+
     def _set_speedup_config_path(self, speedup_config_path):
         """"Check and set speedup config for auto parallel."""
         if speedup_config_path is None or speedup_config_path == "":
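The new `_check_speedup_config_str_value` helper validates string values for the *_overlap keys against the allowed communication-op names and normalizes a bool value into either the full op-name string or an empty string. A hedged sketch of a `parallel_speed_up_json_path` file that exercises the new string form (the file path and chosen op names are illustrative):

# Hedged sketch: writing a parallel speed-up config using the new string form.
# Valid op names are those listed in _check_speedup_config_str_value above.
import json
import mindspore as ms

speedup = {
    "recompute_comm_overlap": "AllGather,ReduceScatter",  # string form: subset of the allowed op names
    "matmul_grad_comm_overlap": True,                     # bool is still accepted for this key
    "enable_task_opt": True,
}
with open("./parallel_speed_up.json", "w") as f:
    json.dump(speedup, f)

ms.set_context(ascend_config={"parallel_speed_up_json_path": "./parallel_speed_up.json"})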
@@ -848,10 +863,10 @@
                                  f"{speedup_config_real_path} does not exist, please check whether the "
                                  f"'parallel_speed_up_json_path' is correct.")
         try:
-            valid_option = {"recompute_comm_overlap": (ms_ctx_param.recompute_comm_overlap, bool),
-                            "recomputation_communication_overlap": (ms_ctx_param.recompute_comm_overlap, bool),
-                            "matmul_grad_comm_overlap": (ms_ctx_param.matmul_grad_comm_overlap, bool),
-                            "grad_matmul_communication_overlap": (ms_ctx_param.matmul_grad_comm_overlap, bool),
+            valid_option = {"recompute_comm_overlap": (ms_ctx_param.recompute_comm_overlap, str),
+                            "recomputation_communication_overlap": (ms_ctx_param.recompute_comm_overlap, str),
+                            "matmul_grad_comm_overlap": (ms_ctx_param.matmul_grad_comm_overlap, (bool, str)),
+                            "grad_matmul_communication_overlap": (ms_ctx_param.matmul_grad_comm_overlap, (bool, str)),
                             "enable_task_opt": (ms_ctx_param.enable_task_opt, bool),
                             "enable_communication_fusion": (ms_ctx_param.enable_task_opt, bool),
                             "enable_grad_comm_opt": (ms_ctx_param.enable_grad_comm_opt, bool),
@@ -908,17 +923,12 @@
                                        f"Please use '{name_replace.get(key)}' instead.")
                 set_func, valid_type = valid_option.get(key)
                 if not isinstance(value, valid_type):
-                    raise TypeError(f"The value type of {key} must be {valid_type}, "
-                                    f"but got value is {value} and type is {type(value)}.")
-                if key == "pp_1f1b_overlap":
-                    values = value.split(",")
-                    for v in values:
-                        if v not in ['AlltoAll', 'AlltoAllV', 'MorphAllGather',
-                                     'AllGather', 'ReduceScatter', 'MorphReduceScatter']:
-                            raise ValueError("{} 's value should be subset of ['AlltoAll', 'AlltoAllV',"
-                                             " 'MorphAllGather', 'AllGather', 'ReduceScatter',"
-                                             " 'MorphReduceScatter'].".format(key))
-                self.set_param(set_func, value)
+                    if not ((key == "recompute_comm_overlap" or key == "recomputation_communication_overlap")
+                            and isinstance(value, bool)):
+                        raise TypeError(f"The value type of {key} must be {valid_type}, "
+                                        f"but got value is {value} and type is {type(value)}.")
+                value_new = self._check_speedup_config_str_value(key, value)
+                self.set_param(set_func, value_new)
             except (TypeError, ValueError) as exo:
                 raise ValueError(str(exo) + "\nFor 'context.set_context', "
                                  "open or load the 'speedup_config_path' file {} "
@@ -1071,8 +1081,8 @@
 
             - pipeline_interleave(bool): Indicates whether to enable the interleaved execution mode.
             - pipeline_scheduler(str): Indicates the scheduling mode for pipeline parallelism. Only support
-              ``gpipe/1f1b/seqpipe/seqvpp/seqsmartvpp``. When applying seqsmartvpp, the pipeline parallel
-              must be an even number.
+              ``gpipe/1f1b/seqpipe/seqvpp/seqsmartvpp/zero_bubble_v``. When applying seqsmartvpp,
+              the pipeline parallel must be an even number.
         parallel_optimizer_config (dict): A dict contains the keys and values for setting the parallel optimizer
             configure. The configure provides more detailed behavior control about parallel training
             when parallel optimizer is enabled. The configure will be effective when we use
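For context, a hedged sketch of how these scheduler options are typically passed; it assumes `pipeline_interleave` and `pipeline_scheduler` are supplied through the `pipeline_config` dict of `set_auto_parallel_context`, and the stage count and scheduler choice are illustrative:

# Hedged sketch: choosing a pipeline scheduler (assumed pipeline_config usage).
import mindspore as ms

ms.set_auto_parallel_context(
    parallel_mode=ms.ParallelMode.SEMI_AUTO_PARALLEL,
    pipeline_stages=4,
    pipeline_config={
        "pipeline_interleave": True,
        "pipeline_scheduler": "1f1b",  # 2.7.0 additionally documents "zero_bubble_v"
    },
)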
@@ -1557,7 +1567,8 @@
         check_bprop (bool): This parameter will be deprecated and removed in future versions.
         enable_reduce_precision (bool): This parameter will be deprecated and removed in a future versions.
         grad_for_scalar (bool): This parameter will be deprecated and removed in future versions.
-        support_binary (bool): Whether to support run .pyc or .so in graph mode.
+        support_binary (bool): Whether to support run .pyc or .so in graph mode. This parameter will be deprecated and
+            removed in a future version. Please use the environment variable `MS_SUPPORT_BINARY` instead.
 
     Examples:
         >>> import mindspore as ms
@@ -1769,8 +1780,9 @@
             Default: ``False`` .
         config_file_path (str): Configuration file path used by recovery, parameter server training mode only
             supports Server disaster recovery currently. Default: ``''`` .
-        scheduler_manage_port (int): Scheduler manage port used to scale out/in. Default: ``11202`` .
-        enable_ssl (bool): Set PS SSL mode enabled or disabled. Default: ``False`` .
+        enable_ssl (bool): Set PS SSL mode enabled or disabled. Default: ``False``.
+            When set to False, users need to review and confirm the security of network environment
+            where the distributed job is located.
         client_password (str): Password to decrypt the secret key stored in the client certificate. Default: ``''`` .
         server_password (str): Password to decrypt the secret key stored in the server certificate. Default: ``''`` .
 
@@ -1779,7 +1791,7 @@
 
     Examples:
         >>> import mindspore as ms
-        >>> ms.set_ps_context(enable_ps=True, enable_ssl=True, client_password='123456', server_password='123456')
+        >>> ms.set_ps_context(enable_ps=True, enable_ssl=True, client_password='', server_password='')
     """
     _set_ps_context(**kwargs)
 
@@ -1796,12 +1808,9 @@
         - config_file_path (str, optional): Configuration file path used by recovery,
           parameter server training mode only
           supports Server disaster recovery currently. Default: ``''`` .
-        - scheduler_manage_port (int, optional): Scheduler manage port used to scale out/in. Default: ``11202`` .
         - enable_ssl (bool, optional): Set PS SSL mode enabled or disabled. Default: ``False`` .
-        - client_password (str, optional): Password to decrypt the secret key stored in the client certificate.
-          Default: ``''`` .
-        - server_password (str, optional): Password to decrypt the secret key stored in the server certificate.
-          Default: ``''`` .
+          When set to False, users need to review and confirm the security of network environment
+          where the distributed job is located.
 
     Returns:
         Returns attribute value according to the key.
mindspore/dataset/__init__.py CHANGED
@@ -13,7 +13,7 @@
 # limitations under the License.
 """
 At the heart of MindSpore data loading utility is the `mindspore.dataset` module.
-It is a `dataset engine <https://www.mindspore.cn/docs/en/master/design/data_engine.html>`_ based on pipline design.
+It is a `dataset engine <https://www.mindspore.cn/docs/en/master/features/data_engine.html>`_ based on pipline design.
 
 This module provides the following data loading methods to help users load datasets into MindSpore.
 
mindspore/dataset/audio/transforms.py CHANGED
@@ -2793,7 +2793,7 @@ class PhaseVocoder(AudioTensorOperation):
     Raises:
         TypeError: If `rate` is not of type float.
         ValueError: If `rate` is not a positive number.
-        TypeError: If `phase_advance` is not of type :class:`numpy.ndarray` .
+        TypeError: If `phase_advance` is not of type `numpy.ndarray` .
         RuntimeError: If input tensor is not in shape of <..., freq, num_frame, complex=2>.
 
     Supported Platforms:
mindspore/dataset/core/config.py CHANGED
@@ -34,7 +34,6 @@ from mindspore.dataset.core.validator_helpers import replace_none, type_check, c
 from mindspore.dataset.debug import DebugHook, PrintMetaDataHook
 from mindspore.dataset.core.validator_helpers import check_independent_mode
 
-
 __all__ = ['set_sending_batches', 'load', '_init_device_info',
            'set_seed', 'get_seed',
            'set_prefetch_size', 'get_prefetch_size',
@@ -1097,12 +1096,12 @@ def get_error_samples_mode():
     return _CDE_TO_PYTHON_ERROR_SAMPLES_MODE.get(_config.get_error_samples_mode())
 
 
-def set_iterator_mode(do_copy=True, parallel_convert=False):
+def set_iterator_mode(do_copy=False, parallel_convert=False):
     """
     Select dataset iterator optimization strategy.
 
     Args:
-        do_copy (bool): Whether dataset iterator creates a Tensor from numpy.ndarray without copy. Default: "True".
+        do_copy (bool): Whether dataset iterator creates a Tensor from numpy.ndarray without copy. Default: "False".
         parallel_convert (bool): Whether dataset iterator starts a thread to organize Tensors to output.
             Default: "False".
 
@@ -1122,7 +1121,7 @@ def set_iterator_mode(do_copy=True, parallel_convert=False):
 def get_iterator_mode():
     """
     Get dataset iterator mode indicate iterator optimization strategy.
-    If `set_iterator_mode` is never called before, `do_copy` default to "True", `parallel_convert` default to "False".
+    If `set_iterator_mode` is never called before, `do_copy` default to "False", `parallel_convert` default to "False".
 
     Returns:
         dict, iterator mode dictionary contains the value of `do_copy` and `parallel_convert`.
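The default of `do_copy` therefore flips from "True" in 2.6.0 to "False" in 2.7.0; callers that relied on the old behavior can set it back explicitly. A short sketch using the two functions shown in this hunk:

# Short sketch: pinning the 2.6.0 iterator behavior after the default change above.
import mindspore.dataset as ds

ds.config.set_iterator_mode(do_copy=True)  # restore the previous default explicitly
print(ds.config.get_iterator_mode())       # e.g. {'do_copy': True, 'parallel_convert': False}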
@@ -1174,3 +1173,38 @@ def get_multiprocessing_start_method():
     >>> multiprocessing_start_method = ds.config.get_multiprocessing_start_method()
     """
     return _config.get_multiprocessing_start_method()
+
+
+def set_video_backend(backend):
+    """
+    Set the backend used to decode videos.
+
+    Args:
+        backend (str): Type of the video backend. It can be "CPU" or "Ascend".
+
+    Raises:
+        TypeError: If `backend` is not of type str.
+        ValueError: If `backend` is not "CPU" or "Ascend".
+
+    Examples:
+        >>> import mindspore.dataset as ds
+        >>> ds.config.set_video_backend("CPU")
+    """
+
+    type_check(backend, (str,), "backend")
+    check_valid_str(backend, ["CPU", "Ascend"], "backend")
+    _config.set_video_backend(backend)
+
+
+def get_video_backend():
+    """
+    Returns the currently active backend used to decode videos.
+
+    Returns:
+        str, backend used to decode videos.
+
+    Examples:
+        >>> import mindspore.dataset as ds
+        >>> backend = ds.config.get_video_backend()
+    """
+    return _config.get_video_backend()