mindspore-2.4.1-cp310-cp310-win_amd64.whl → mindspore-2.5.0-cp310-cp310-win_amd64.whl

This diff compares the contents of two publicly released versions of the package, as published to one of the supported registries. It is provided for informational purposes only and reflects the packages exactly as they appear in those public registries.

Potentially problematic release.


This version of mindspore might be problematic.

Files changed (372)
  1. mindspore/.commit_id +1 -1
  2. mindspore/__init__.py +8 -3
  3. mindspore/_c_dataengine.cp310-win_amd64.pyd +0 -0
  4. mindspore/_c_expression.cp310-win_amd64.pyd +0 -0
  5. mindspore/_c_mindrecord.cp310-win_amd64.pyd +0 -0
  6. mindspore/_checkparam.py +0 -5
  7. mindspore/_extends/parallel_compile/akg_compiler/gen_custom_op_files.py +1 -1
  8. mindspore/_extends/parse/compile_config.py +64 -0
  9. mindspore/_extends/parse/deprecated/__init__.py +0 -0
  10. mindspore/_extends/parse/deprecated/deprecated_tensor_method.py +375 -0
  11. mindspore/_extends/parse/parser.py +23 -5
  12. mindspore/_extends/parse/standard_method.py +123 -27
  13. mindspore/_extends/pijit/pijit_func_white_list.py +1 -1
  14. mindspore/amp.py +7 -1
  15. mindspore/avcodec-59.dll +0 -0
  16. mindspore/avdevice-59.dll +0 -0
  17. mindspore/avfilter-8.dll +0 -0
  18. mindspore/avformat-59.dll +0 -0
  19. mindspore/avutil-57.dll +0 -0
  20. mindspore/boost/boost_cell_wrapper.py +136 -41
  21. mindspore/common/__init__.py +3 -1
  22. mindspore/common/_register_for_tensor.py +0 -1
  23. mindspore/common/_stub_tensor.py +25 -4
  24. mindspore/common/_tensor_cpp_method.py +17 -0
  25. mindspore/common/_tensor_docs.py +6132 -0
  26. mindspore/common/api.py +99 -25
  27. mindspore/common/dtype.py +34 -34
  28. mindspore/common/dump.py +2 -1
  29. mindspore/common/file_system.py +8 -1
  30. mindspore/common/generator.py +2 -0
  31. mindspore/common/hook_handle.py +3 -1
  32. mindspore/common/initializer.py +3 -4
  33. mindspore/common/lazy_inline.py +8 -2
  34. mindspore/common/mindir_util.py +10 -2
  35. mindspore/common/parameter.py +30 -27
  36. mindspore/common/tensor.py +713 -1337
  37. mindspore/communication/__init__.py +1 -1
  38. mindspore/communication/_comm_helper.py +10 -0
  39. mindspore/communication/comm_func.py +215 -173
  40. mindspore/communication/management.py +23 -20
  41. mindspore/context.py +292 -193
  42. mindspore/dataset/__init__.py +23 -19
  43. mindspore/dataset/callback/ds_callback.py +2 -1
  44. mindspore/dataset/core/config.py +84 -3
  45. mindspore/dataset/engine/cache_admin.py +3 -3
  46. mindspore/dataset/engine/cache_client.py +5 -4
  47. mindspore/dataset/engine/datasets.py +192 -149
  48. mindspore/dataset/engine/datasets_audio.py +14 -0
  49. mindspore/dataset/engine/datasets_standard_format.py +28 -11
  50. mindspore/dataset/engine/datasets_text.py +38 -1
  51. mindspore/dataset/engine/datasets_user_defined.py +125 -65
  52. mindspore/dataset/engine/datasets_vision.py +81 -8
  53. mindspore/dataset/engine/iterators.py +281 -63
  54. mindspore/dataset/engine/obs/util.py +8 -0
  55. mindspore/dataset/engine/queue.py +40 -0
  56. mindspore/dataset/engine/samplers.py +26 -2
  57. mindspore/dataset/engine/serializer_deserializer.py +1 -1
  58. mindspore/dataset/engine/validators.py +43 -11
  59. mindspore/dataset/transforms/py_transforms_util.py +17 -0
  60. mindspore/dataset/transforms/transforms.py +29 -12
  61. mindspore/dataset/vision/validators.py +1 -2
  62. mindspore/device_context/__init__.py +21 -0
  63. mindspore/device_context/ascend/__init__.py +25 -0
  64. mindspore/device_context/ascend/device.py +72 -0
  65. mindspore/device_context/ascend/op_debug.py +94 -0
  66. mindspore/device_context/ascend/op_precision.py +193 -0
  67. mindspore/device_context/ascend/op_tuning.py +127 -0
  68. mindspore/device_context/cpu/__init__.py +25 -0
  69. mindspore/device_context/cpu/device.py +62 -0
  70. mindspore/device_context/cpu/op_tuning.py +43 -0
  71. mindspore/device_context/gpu/__init__.py +21 -0
  72. mindspore/device_context/gpu/device.py +70 -0
  73. mindspore/device_context/gpu/op_precision.py +67 -0
  74. mindspore/device_context/gpu/op_tuning.py +175 -0
  75. mindspore/device_manager.py +134 -0
  76. mindspore/dnnl.dll +0 -0
  77. mindspore/experimental/llm_boost/__init__.py +3 -2
  78. mindspore/experimental/llm_boost/ascend_native/__init__.py +22 -0
  79. mindspore/experimental/llm_boost/ascend_native/llama_boost_ascend_native.py +211 -0
  80. mindspore/experimental/llm_boost/ascend_native/llm_boost.py +52 -0
  81. mindspore/experimental/llm_boost/atb/boost_base.py +239 -64
  82. mindspore/experimental/llm_boost/atb/llama_boost.py +52 -30
  83. mindspore/experimental/llm_boost/atb/qwen_boost.py +47 -24
  84. mindspore/experimental/llm_boost/register.py +1 -0
  85. mindspore/experimental/optim/adadelta.py +26 -22
  86. mindspore/experimental/optim/adam.py +3 -0
  87. mindspore/experimental/optim/lr_scheduler.py +33 -24
  88. mindspore/experimental/optim/radam.py +33 -30
  89. mindspore/hal/device.py +28 -0
  90. mindspore/hal/event.py +17 -0
  91. mindspore/hal/memory.py +94 -3
  92. mindspore/hal/stream.py +91 -6
  93. mindspore/include/api/context.h +1 -2
  94. mindspore/include/dataset/constants.h +2 -2
  95. mindspore/jpeg62.dll +0 -0
  96. mindspore/log.py +12 -0
  97. mindspore/mindrecord/__init__.py +1 -1
  98. mindspore/mindrecord/config.py +17 -316
  99. mindspore/mindrecord/filereader.py +1 -9
  100. mindspore/mindrecord/filewriter.py +5 -15
  101. mindspore/mindrecord/mindpage.py +1 -9
  102. mindspore/mindspore_backend.dll +0 -0
  103. mindspore/mindspore_common.dll +0 -0
  104. mindspore/mindspore_core.dll +0 -0
  105. mindspore/mindspore_glog.dll +0 -0
  106. mindspore/mindspore_ops.dll +0 -0
  107. mindspore/mint/__init__.py +824 -218
  108. mindspore/mint/distributed/__init__.py +66 -4
  109. mindspore/mint/distributed/distributed.py +2594 -44
  110. mindspore/mint/linalg/__init__.py +6 -0
  111. mindspore/mint/nn/__init__.py +473 -14
  112. mindspore/mint/nn/functional.py +486 -11
  113. mindspore/mint/nn/layer/__init__.py +17 -4
  114. mindspore/mint/nn/layer/_functions.py +330 -0
  115. mindspore/mint/nn/layer/activation.py +169 -1
  116. mindspore/mint/nn/layer/basic.py +123 -0
  117. mindspore/mint/nn/layer/conv.py +727 -0
  118. mindspore/mint/nn/layer/normalization.py +215 -19
  119. mindspore/mint/nn/layer/padding.py +797 -0
  120. mindspore/mint/nn/layer/pooling.py +170 -0
  121. mindspore/mint/optim/__init__.py +2 -1
  122. mindspore/mint/optim/adam.py +223 -0
  123. mindspore/mint/optim/adamw.py +26 -19
  124. mindspore/mint/special/__init__.py +2 -1
  125. mindspore/multiprocessing/__init__.py +5 -0
  126. mindspore/nn/__init__.py +2 -0
  127. mindspore/nn/cell.py +142 -21
  128. mindspore/nn/dynamic_lr.py +2 -1
  129. mindspore/nn/layer/activation.py +6 -6
  130. mindspore/nn/layer/basic.py +35 -25
  131. mindspore/nn/layer/channel_shuffle.py +3 -3
  132. mindspore/nn/layer/conv.py +3 -0
  133. mindspore/nn/layer/embedding.py +3 -3
  134. mindspore/nn/layer/normalization.py +8 -7
  135. mindspore/nn/layer/padding.py +4 -3
  136. mindspore/nn/layer/pooling.py +55 -23
  137. mindspore/nn/layer/rnn_cells.py +1 -1
  138. mindspore/nn/layer/rnns.py +2 -1
  139. mindspore/nn/layer/timedistributed.py +5 -5
  140. mindspore/nn/layer/transformer.py +48 -26
  141. mindspore/nn/learning_rate_schedule.py +5 -3
  142. mindspore/nn/loss/loss.py +31 -36
  143. mindspore/nn/optim/ada_grad.py +1 -0
  144. mindspore/nn/optim/adadelta.py +2 -2
  145. mindspore/nn/optim/adam.py +1 -1
  146. mindspore/nn/optim/lars.py +1 -4
  147. mindspore/nn/optim/optimizer.py +1 -1
  148. mindspore/nn/optim/rprop.py +2 -2
  149. mindspore/nn/optim/thor.py +2 -1
  150. mindspore/nn/utils/__init__.py +22 -0
  151. mindspore/nn/utils/init.py +73 -0
  152. mindspore/nn/wrap/cell_wrapper.py +4 -6
  153. mindspore/nn/wrap/loss_scale.py +3 -4
  154. mindspore/numpy/array_creations.py +60 -62
  155. mindspore/numpy/array_ops.py +148 -143
  156. mindspore/numpy/logic_ops.py +41 -42
  157. mindspore/numpy/math_ops.py +361 -359
  158. mindspore/numpy/utils.py +16 -16
  159. mindspore/numpy/utils_const.py +4 -4
  160. mindspore/opencv_core452.dll +0 -0
  161. mindspore/opencv_imgcodecs452.dll +0 -0
  162. mindspore/opencv_imgproc452.dll +0 -0
  163. mindspore/ops/__init__.py +2 -1
  164. mindspore/ops/_grad_experimental/grad_comm_ops.py +107 -8
  165. mindspore/ops/_grad_experimental/grad_debug_ops.py +6 -1
  166. mindspore/ops/_grad_experimental/grad_inner_ops.py +9 -0
  167. mindspore/ops/_grad_experimental/grad_math_ops.py +2 -1
  168. mindspore/ops/_op_impl/cpu/__init__.py +1 -0
  169. mindspore/ops/_op_impl/cpu/raise_op.py +28 -0
  170. mindspore/ops/_vmap/vmap_array_ops.py +20 -19
  171. mindspore/ops/_vmap/vmap_base.py +0 -2
  172. mindspore/ops/_vmap/vmap_grad_nn_ops.py +19 -13
  173. mindspore/ops/_vmap/vmap_math_ops.py +11 -9
  174. mindspore/ops/_vmap/vmap_nn_ops.py +20 -34
  175. mindspore/ops/auto_generate/cpp_create_prim_instance_helper.py +149 -12
  176. mindspore/ops/auto_generate/gen_arg_handler.py +0 -61
  177. mindspore/ops/auto_generate/gen_extend_func.py +554 -60
  178. mindspore/ops/auto_generate/gen_ops_def.py +1621 -115
  179. mindspore/ops/auto_generate/gen_ops_prim.py +8027 -3411
  180. mindspore/ops/auto_generate/pyboost_inner_prim.py +183 -79
  181. mindspore/ops/composite/base.py +1 -1
  182. mindspore/ops/composite/multitype_ops/_compile_utils.py +229 -30
  183. mindspore/ops/composite/multitype_ops/pow_impl.py +0 -29
  184. mindspore/ops/function/__init__.py +12 -0
  185. mindspore/ops/function/array_func.py +561 -159
  186. mindspore/ops/function/clip_func.py +64 -0
  187. mindspore/ops/function/debug_func.py +28 -20
  188. mindspore/ops/function/image_func.py +1 -1
  189. mindspore/ops/function/linalg_func.py +5 -4
  190. mindspore/ops/function/math_func.py +1664 -294
  191. mindspore/ops/function/nn_func.py +988 -317
  192. mindspore/ops/function/parameter_func.py +3 -56
  193. mindspore/ops/function/random_func.py +243 -33
  194. mindspore/ops/function/sparse_unary_func.py +1 -1
  195. mindspore/ops/functional.py +18 -5
  196. mindspore/ops/functional_overload.py +897 -0
  197. mindspore/ops/operations/__init__.py +3 -2
  198. mindspore/ops/operations/_embedding_cache_ops.py +4 -4
  199. mindspore/ops/operations/_grad_ops.py +2 -34
  200. mindspore/ops/operations/_infer_ops.py +2 -1
  201. mindspore/ops/operations/_inner_ops.py +38 -8
  202. mindspore/ops/operations/array_ops.py +45 -303
  203. mindspore/ops/operations/comm_ops.py +23 -17
  204. mindspore/ops/operations/custom_ops.py +7 -49
  205. mindspore/ops/operations/debug_ops.py +42 -47
  206. mindspore/ops/operations/inner_ops.py +6 -4
  207. mindspore/ops/operations/linalg_ops.py +3 -2
  208. mindspore/ops/operations/manually_defined/ops_def.py +185 -104
  209. mindspore/ops/operations/math_ops.py +11 -216
  210. mindspore/ops/operations/nn_ops.py +153 -310
  211. mindspore/ops/primitive.py +23 -21
  212. mindspore/ops/tensor_method.py +1669 -0
  213. mindspore/ops_generate/aclnn_kernel_register_auto_cc_generator.py +110 -0
  214. mindspore/ops_generate/add_tensor_docs_generator.py +54 -0
  215. mindspore/ops_generate/arg_handler.py +0 -61
  216. mindspore/ops_generate/auto_grad_impl_cc_generator.py +135 -0
  217. mindspore/ops_generate/auto_grad_reg_cc_generator.py +93 -0
  218. mindspore/ops_generate/base_generator.py +11 -0
  219. mindspore/ops_generate/cpp_create_prim_instance_helper_generator.py +108 -0
  220. mindspore/ops_generate/functional_map_cpp_generator.py +491 -0
  221. mindspore/ops_generate/functional_overload_py_generator.py +110 -0
  222. mindspore/ops_generate/functions_cc_generator.py +233 -0
  223. mindspore/ops_generate/gen_aclnn_implement.py +110 -114
  224. mindspore/ops_generate/gen_constants.py +157 -3
  225. mindspore/ops_generate/gen_ops.py +245 -990
  226. mindspore/ops_generate/gen_pyboost_func.py +97 -998
  227. mindspore/ops_generate/gen_utils.py +119 -33
  228. mindspore/ops_generate/lite_ops_cpp_generator.py +155 -0
  229. mindspore/ops_generate/op_api_proto.py +206 -0
  230. mindspore/ops_generate/op_def_py_generator.py +131 -0
  231. mindspore/ops_generate/op_prim_py_generator.py +480 -0
  232. mindspore/ops_generate/op_proto.py +373 -108
  233. mindspore/ops_generate/op_template_parser.py +436 -0
  234. mindspore/ops_generate/ops_def_cc_generator.py +288 -0
  235. mindspore/ops_generate/ops_def_h_generator.py +74 -0
  236. mindspore/ops_generate/ops_name_h_generator.py +68 -0
  237. mindspore/ops_generate/ops_primitive_h_generator.py +81 -0
  238. mindspore/ops_generate/pyboost_functions_cpp_generator.py +370 -0
  239. mindspore/ops_generate/pyboost_functions_h_generator.py +68 -0
  240. mindspore/ops_generate/pyboost_functions_py_generator.py +148 -0
  241. mindspore/ops_generate/pyboost_grad_function_cpp_generator.py +154 -0
  242. mindspore/ops_generate/pyboost_inner_prim_generator.py +131 -0
  243. mindspore/ops_generate/pyboost_native_grad_functions_generator.py +268 -0
  244. mindspore/ops_generate/pyboost_op_cpp_code_generator.py +851 -0
  245. mindspore/ops_generate/pyboost_overload_functions_cpp_generator.py +344 -0
  246. mindspore/ops_generate/pyboost_utils.py +92 -33
  247. mindspore/ops_generate/template.py +294 -44
  248. mindspore/ops_generate/tensor_func_reg_cpp_generator.py +422 -0
  249. mindspore/parallel/__init__.py +3 -3
  250. mindspore/parallel/_auto_parallel_context.py +44 -34
  251. mindspore/parallel/_cell_wrapper.py +22 -3
  252. mindspore/parallel/_parallel_serialization.py +13 -2
  253. mindspore/parallel/_utils.py +4 -2
  254. mindspore/parallel/algo_parameter_config.py +1 -1
  255. mindspore/parallel/checkpoint_transform.py +44 -0
  256. mindspore/parallel/cluster/process_entity/_api.py +131 -37
  257. mindspore/parallel/cluster/process_entity/_utils.py +41 -6
  258. mindspore/parallel/cluster/run.py +20 -3
  259. mindspore/parallel/parameter_broadcast.py +1 -1
  260. mindspore/parallel/shard.py +3 -0
  261. mindspore/parallel/transform_safetensors.py +119 -253
  262. mindspore/profiler/__init__.py +17 -4
  263. mindspore/profiler/analysis/__init__.py +0 -0
  264. mindspore/profiler/analysis/parser/__init__.py +0 -0
  265. mindspore/profiler/analysis/parser/ascend_cann_parser.py +166 -0
  266. mindspore/profiler/analysis/parser/base_parser.py +158 -0
  267. mindspore/profiler/analysis/parser/framework_cann_relation_parser.py +45 -0
  268. mindspore/profiler/analysis/parser/ms_framework_parser.py +142 -0
  269. mindspore/profiler/analysis/parser/ms_minddata_parser.py +145 -0
  270. mindspore/profiler/analysis/parser/timeline_assembly_factory/__init__.py +0 -0
  271. mindspore/profiler/analysis/parser/timeline_assembly_factory/ascend_timeline_assembler.py +261 -0
  272. mindspore/profiler/analysis/parser/timeline_assembly_factory/base_timeline_assembler.py +40 -0
  273. mindspore/profiler/analysis/parser/timeline_assembly_factory/trace_view_container.py +84 -0
  274. mindspore/profiler/analysis/parser/timeline_creator/__init__.py +0 -0
  275. mindspore/profiler/analysis/parser/timeline_creator/base_timeline_creator.py +44 -0
  276. mindspore/profiler/analysis/parser/timeline_creator/cpu_op_timeline_creator.py +90 -0
  277. mindspore/profiler/analysis/parser/timeline_creator/fwk_timeline_creator.py +76 -0
  278. mindspore/profiler/analysis/parser/timeline_creator/msprof_timeline_creator.py +103 -0
  279. mindspore/profiler/analysis/parser/timeline_creator/scope_layer_timeline_creator.py +134 -0
  280. mindspore/profiler/analysis/parser/timeline_event/__init__.py +0 -0
  281. mindspore/profiler/analysis/parser/timeline_event/base_event.py +233 -0
  282. mindspore/profiler/analysis/parser/timeline_event/cpu_op_event.py +47 -0
  283. mindspore/profiler/analysis/parser/timeline_event/flow_event.py +36 -0
  284. mindspore/profiler/analysis/parser/timeline_event/fwk_event.py +260 -0
  285. mindspore/profiler/analysis/parser/timeline_event/msprof_event.py +73 -0
  286. mindspore/profiler/analysis/parser/timeline_event/scope_layer_event.py +53 -0
  287. mindspore/profiler/analysis/parser/timeline_event/timeline_event_pool.py +146 -0
  288. mindspore/profiler/analysis/task_manager.py +131 -0
  289. mindspore/profiler/analysis/time_converter.py +84 -0
  290. mindspore/profiler/analysis/viewer/__init__.py +0 -0
  291. mindspore/profiler/analysis/viewer/ascend_communication_viewer.py +333 -0
  292. mindspore/profiler/analysis/viewer/ascend_integrate_viewer.py +87 -0
  293. mindspore/profiler/analysis/viewer/ascend_kernel_details_viewer.py +252 -0
  294. mindspore/profiler/analysis/viewer/ascend_memory_viewer.py +313 -0
  295. mindspore/profiler/analysis/viewer/ascend_op_memory_viewer.py +322 -0
  296. mindspore/profiler/analysis/viewer/ascend_step_trace_time_viewer.py +265 -0
  297. mindspore/profiler/analysis/viewer/ascend_timeline_viewer.py +58 -0
  298. mindspore/profiler/analysis/viewer/base_viewer.py +26 -0
  299. mindspore/profiler/analysis/viewer/ms_dataset_viewer.py +97 -0
  300. mindspore/profiler/analysis/viewer/ms_minddata_viewer.py +581 -0
  301. mindspore/profiler/analysis/work_flow.py +73 -0
  302. mindspore/profiler/common/ascend_msprof_exporter.py +138 -0
  303. mindspore/profiler/common/command_executor.py +90 -0
  304. mindspore/profiler/common/constant.py +174 -3
  305. mindspore/profiler/common/file_manager.py +208 -0
  306. mindspore/profiler/common/log.py +130 -0
  307. mindspore/profiler/common/msprof_cmd_tool.py +202 -0
  308. mindspore/profiler/common/path_manager.py +371 -0
  309. mindspore/profiler/common/process_bar.py +168 -0
  310. mindspore/profiler/common/process_pool.py +9 -3
  311. mindspore/profiler/common/profiler_context.py +476 -0
  312. mindspore/profiler/common/profiler_info.py +304 -0
  313. mindspore/profiler/common/profiler_output_path.py +284 -0
  314. mindspore/profiler/common/profiler_parameters.py +210 -0
  315. mindspore/profiler/common/profiler_path_manager.py +120 -0
  316. mindspore/profiler/common/record_function.py +76 -0
  317. mindspore/profiler/common/tlv_decoder.py +76 -0
  318. mindspore/profiler/common/util.py +75 -2
  319. mindspore/profiler/dynamic_profiler.py +270 -37
  320. mindspore/profiler/envprofiler.py +138 -0
  321. mindspore/profiler/mstx.py +199 -0
  322. mindspore/profiler/platform/__init__.py +21 -0
  323. mindspore/profiler/platform/base_profiler.py +40 -0
  324. mindspore/profiler/platform/cpu_profiler.py +124 -0
  325. mindspore/profiler/platform/gpu_profiler.py +74 -0
  326. mindspore/profiler/platform/npu_profiler.py +309 -0
  327. mindspore/profiler/profiler.py +580 -93
  328. mindspore/profiler/profiler_action_controller.py +187 -0
  329. mindspore/profiler/profiler_interface.py +114 -0
  330. mindspore/profiler/schedule.py +208 -0
  331. mindspore/rewrite/api/symbol_tree.py +1 -2
  332. mindspore/run_check/_check_version.py +18 -13
  333. mindspore/runtime/__init__.py +37 -0
  334. mindspore/runtime/device.py +27 -0
  335. mindspore/runtime/event.py +209 -0
  336. mindspore/runtime/executor.py +148 -0
  337. mindspore/runtime/memory.py +392 -0
  338. mindspore/runtime/stream.py +460 -0
  339. mindspore/runtime/thread_bind_core.py +401 -0
  340. mindspore/swresample-4.dll +0 -0
  341. mindspore/swscale-6.dll +0 -0
  342. mindspore/tinyxml2.dll +0 -0
  343. mindspore/train/__init__.py +2 -2
  344. mindspore/train/_utils.py +53 -18
  345. mindspore/train/amp.py +8 -4
  346. mindspore/train/callback/_checkpoint.py +32 -18
  347. mindspore/train/callback/_early_stop.py +1 -1
  348. mindspore/train/callback/_flops_collector.py +105 -69
  349. mindspore/train/callback/_history.py +1 -1
  350. mindspore/train/callback/_summary_collector.py +44 -6
  351. mindspore/train/callback/_tft_register.py +37 -15
  352. mindspore/train/dataset_helper.py +11 -11
  353. mindspore/train/metrics/precision.py +4 -5
  354. mindspore/train/mind_ir_pb2.py +167 -46
  355. mindspore/train/model.py +13 -14
  356. mindspore/train/serialization.py +461 -72
  357. mindspore/train/summary/summary_record.py +1 -2
  358. mindspore/train/train_thor/model_thor.py +1 -1
  359. mindspore/turbojpeg.dll +0 -0
  360. mindspore/utils/__init__.py +4 -2
  361. mindspore/utils/dryrun.py +138 -0
  362. mindspore/utils/runtime_execution_order_check.py +550 -0
  363. mindspore/version.py +1 -1
  364. {mindspore-2.4.1.dist-info → mindspore-2.5.0.dist-info}/METADATA +3 -4
  365. {mindspore-2.4.1.dist-info → mindspore-2.5.0.dist-info}/RECORD +368 -242
  366. {mindspore-2.4.1.dist-info → mindspore-2.5.0.dist-info}/entry_points.txt +1 -1
  367. mindspore/common/_tensor_overload.py +0 -139
  368. mindspore/mindspore_np_dtype.dll +0 -0
  369. mindspore/profiler/envprofiling.py +0 -254
  370. mindspore/profiler/profiling.py +0 -1926
  371. {mindspore-2.4.1.dist-info → mindspore-2.5.0.dist-info}/WHEEL +0 -0
  372. {mindspore-2.4.1.dist-info → mindspore-2.5.0.dist-info}/top_level.txt +0 -0
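
Among the files listed above, mindspore/version.py and mindspore/run_check/_check_version.py both change, so the upgraded wheel should identify itself as 2.5.0. A minimal post-upgrade sanity check is sketched below; it is an illustrative aside rather than part of the diff, assumes a standard pip install of the 2.5.0 wheel, and uses only the public mindspore.__version__ attribute and the mindspore.run_check() helper.

    # Sketch: confirm the imported package is the upgraded 2.5.0 wheel.
    import mindspore as ms

    print(ms.__version__)   # expected to report 2.5.0 (driven by mindspore/version.py)
    ms.run_check()          # runs a small calculation and reports whether the install works
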
mindspore/mint/nn/layer/padding.py
@@ -0,0 +1,797 @@
+ # Copyright 2024 Huawei Technologies Co., Ltd
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ # ============================================================================
+ """Padding layers for mint"""
+ from __future__ import absolute_import
+
+ from mindspore import mint
+ from mindspore.nn.cell import Cell
+ from mindspore import _checkparam as validator
+
+
+ class ConstantPadNd_(Cell):
+     """
+     Base class for N-dimensional constant padding.
+     """
+     def __init__(self, padding, value=None, padding_length=None):
+         super(ConstantPadNd_, self).__init__()
+         self.padding = padding
+         self.value = value
+
+         if isinstance(self.padding, int):
+             validator.check_positive_int(self.padding, "padding", self.cls_name)
+             self.padding = (self.padding,) * padding_length
+         elif isinstance(self.padding, (tuple, list)):
+             if len(padding) != padding_length:
+                 msg = f"For '{self.cls_name}', the length of parameter 'padding' with tuple " \
+                       f"type must equal to {padding_length}, but got {len(padding)}."
+                 raise ValueError(msg)
+             validator.check_non_negative_int_sequence(self.padding, "padding", self.cls_name)
+         else:
+             msg = f"For '{self.cls_name}', 'padding' must be positive integer or tuple/list of {padding_length}" \
+                   f" positive integers, but got {padding}."
+             raise ValueError(msg)
+
+     def construct(self, input):
+         return mint.nn.functional.pad(input, self.padding, mode='constant', value=self.value)
+
+
+ class ConstantPad1d(ConstantPadNd_):
+     r"""
+     Pad the last dimension of `input` tensor using `padding` and `value`.
+
+     For more information, please refer to :func:`mindspore.mint.nn.functional.pad`.
+
+     .. warning::
+         This is an experimental API that is subject to change or deletion.
+
+     Args:
+         padding (Union[int, tuple, list]): Specifies padding size.
+         value (Union[int, float]): Specifies padding value.
+
+     Inputs:
+         - **input** (Tensor) - shape is :math:`(N, *)`, where :math:`*` means any number of additional dimensions.
+
+     Outputs:
+         Tensor, the tensor after padding.
+
+     Raises:
+         TypeError: If `padding` is not an integer or a list or tuple of 2 integers.
+         TypeError: If `input` is not Tensor.
+         TypeError: If `value` is not int or float.
+         ValueError: If `padding` contains a negative value.
+         ValueError: If `padding` is a tuple or list, and the length does not match the tensor dimension.
+
+     Supported Platforms:
+         ``Ascend``
+
+     Examples:
+         >>> import numpy as np
+         >>> import mindspore as ms
+         >>> x = np.ones(shape=(1, 2, 3, 4)).astype(np.float32)
+         >>> x = ms.Tensor(x)
+         >>> # padding is tuple
+         >>> padding = (0, 1)
+         >>> value = 0.5
+         >>> pad1d = ms.mint.nn.ConstantPad1d(padding, value)
+         >>> out = pad1d(x)
+         >>> print(out)
+         [[[[1. 1. 1. 1. 0.5]
+            [1. 1. 1. 1. 0.5]
+            [1. 1. 1. 1. 0.5]]
+           [[1. 1. 1. 1. 0.5]
+            [1. 1. 1. 1. 0.5]
+            [1. 1. 1. 1. 0.5]]]]
+         >>> print(out.shape)
+         (1, 2, 3, 5)
+         >>> # padding is int
+         >>> padding = 1
+         >>> value = 0.5
+         >>> pad1d = ms.mint.nn.ConstantPad1d(padding, value)
+         >>> out = pad1d(x)
+         >>> print(out)
+         [[[[0.5 1. 1. 1. 1. 0.5]
+            [0.5 1. 1. 1. 1. 0.5]
+            [0.5 1. 1. 1. 1. 0.5]]
+           [[0.5 1. 1. 1. 1. 0.5]
+            [0.5 1. 1. 1. 1. 0.5]
+            [0.5 1. 1. 1. 1. 0.5]]]]
+         >>> print(out.shape)
+         (1, 2, 3, 6)
+     """
+
+     def __init__(self, padding, value):
+         super(ConstantPad1d, self).__init__(padding, value, padding_length=2)
+
+
+ class ConstantPad2d(ConstantPadNd_):
+     """
+     Pad the last 2 dimensions of `input` tensor using `padding` and `value`.
+
+     For more information, please refer to :func:`mindspore.mint.nn.functional.pad`.
+
+     .. warning::
+         This is an experimental API that is subject to change or deletion.
+
+     Args:
+         padding (Union[int, tuple, list]): Specifies padding size.
+         value (Union[int, float]): Specifies padding value.
+
+     Inputs:
+         - **input** (Tensor) - shape is :math:`(N, *)`, where :math:`*` means any number of additional dimensions.
+
+     Outputs:
+         Tensor, the tensor after padding.
+
+     Raises:
+         TypeError: If `padding` is not an integer or a list or tuple of 4 integers.
+         TypeError: If `input` is not Tensor.
+         TypeError: If `value` is not int or float.
+         ValueError: If `padding` contains a negative value.
+         ValueError: If `padding` is a tuple or list, and the length does not match the tensor dimension.
+
+     Supported Platforms:
+         ``Ascend``
+
+     Examples:
+         >>> import numpy as np
+         >>> import mindspore as ms
+         >>> x = np.ones(shape=(1, 2, 3, 4)).astype(np.float32)
+         >>> x = ms.Tensor(x)
+         >>> padding = (1, 1, 0, 1)
+         >>> value = 0.5
+         >>> pad2d = ms.mint.nn.ConstantPad2d(padding, value)
+         >>> out = pad2d(x)
+         >>> print(out)
+         [[[[0.5 1. 1. 1. 1. 0.5]
+            [0.5 1. 1. 1. 1. 0.5]
+            [0.5 1. 1. 1. 1. 0.5]
+            [0.5 0.5 0.5 0.5 0.5 0.5]]
+           [[0.5 1. 1. 1. 1. 0.5]
+            [0.5 1. 1. 1. 1. 0.5]
+            [0.5 1. 1. 1. 1. 0.5]
+            [0.5 0.5 0.5 0.5 0.5 0.5]]]]
+         >>> print(out.shape)
+         (1, 2, 4, 6)
+     """
+
+     def __init__(self, padding, value):
+         super(ConstantPad2d, self).__init__(padding, value, padding_length=4)
+
+
+ class ConstantPad3d(ConstantPadNd_):
+     """
+     Pad the last 3 dimensions of `input` tensor using `padding` and `value`.
+
+     For more information, please refer to :func:`mindspore.mint.nn.functional.pad`.
+
+     .. warning::
+         This is an experimental API that is subject to change or deletion.
+
+     Args:
+         padding (Union[int, tuple, list]): Specifies padding size.
+         value (Union[int, float]): Specifies padding value.
+
+     Inputs:
+         - **input** (Tensor) - shape is :math:`(N, *)`, where :math:`*` means any number of additional dimensions.
+
+     Outputs:
+         Tensor, the tensor after padding.
+
+     Raises:
+         TypeError: If `padding` is not an integer or a list or tuple of 6 integers.
+         TypeError: If `input` is not Tensor.
+         TypeError: If `value` is not int or float.
+         ValueError: If `padding` contains a negative value.
+         ValueError: If `padding` is a tuple or list, and the length does not match the tensor dimension.
+
+     Supported Platforms:
+         ``Ascend``
+
+     Examples:
+         >>> import numpy as np
+         >>> import mindspore as ms
+         >>> x = np.ones(shape=(1, 2, 3, 4)).astype(np.float32)
+         >>> x = ms.Tensor(x)
+         >>> padding = (1, 1, 0, 1, 1, 0)
+         >>> value = 0.5
+         >>> pad3d = ms.mint.nn.ConstantPad3d(padding, value)
+         >>> out = pad3d(x)
+         >>> print(out)
+         [[[[0.5 0.5 0.5 0.5 0.5 0.5]
+            [0.5 0.5 0.5 0.5 0.5 0.5]
+            [0.5 0.5 0.5 0.5 0.5 0.5]
+            [0.5 0.5 0.5 0.5 0.5 0.5]]
+           [[0.5 1. 1. 1. 1. 0.5]
+            [0.5 1. 1. 1. 1. 0.5]
+            [0.5 1. 1. 1. 1. 0.5]
+            [0.5 0.5 0.5 0.5 0.5 0.5]]
+           [[0.5 1. 1. 1. 1. 0.5]
+            [0.5 1. 1. 1. 1. 0.5]
+            [0.5 1. 1. 1. 1. 0.5]
+            [0.5 0.5 0.5 0.5 0.5 0.5]]]]
+         >>> print(out.shape)
+         (1, 3, 4, 6)
+     """
+
+     def __init__(self, padding, value):
+         super(ConstantPad3d, self).__init__(padding, value, padding_length=6)
+
+
+ class ZeroPadNd_(ConstantPadNd_):
+     """
+     Base class for N-dimensional zero padding.
+     """
+     def __init__(self, padding, padding_length):
+         super(ZeroPadNd_, self).__init__(padding, value=0, padding_length=padding_length)
+
+
+ class ZeroPad1d(ZeroPadNd_):
+     """
+     Pad the last dimension of `input` tensor with 0 using `padding`.
+
+     For more information, please refer to :func:`mindspore.mint.nn.functional.pad`.
+
+     .. warning::
+         This is an experimental API that is subject to change or deletion.
+
+     Args:
+         padding (Union[int, tuple, list]): Specifies padding size.
+
+     Inputs:
+         - **input** (Tensor) - shape is :math:`(N, *)`, where :math:`*` means any number of additional dimensions.
+
+     Outputs:
+         Tensor, the tensor after padding.
+
+     Raises:
+         TypeError: If `padding` is not an integer or a list or tuple of 2 integers.
+         TypeError: If `input` is not Tensor.
+         ValueError: If `padding` contains a negative value.
+         ValueError: If `padding` is a tuple or list, and the length does not match the tensor dimension.
+
+     Supported Platforms:
+         ``Ascend``
+
+     Examples:
+         >>> import numpy as np
+         >>> import mindspore as ms
+         >>> x = np.ones(shape=(1, 2, 3, 4)).astype(np.float32)
+         >>> x = ms.Tensor(x)
+         >>> # padding is tuple
+         >>> padding = (0, 1)
+         >>> pad1d = ms.mint.nn.ZeroPad1d(padding)
+         >>> out = pad1d(x)
+         >>> print(out)
+         [[[[1. 1. 1. 1. 0.]
+            [1. 1. 1. 1. 0.]
+            [1. 1. 1. 1. 0.]]
+           [[1. 1. 1. 1. 0.]
+            [1. 1. 1. 1. 0.]
+            [1. 1. 1. 1. 0.]]]]
+         >>> print(out.shape)
+         (1, 2, 3, 5)
+         >>> # padding is int
+         >>> padding = 1
+         >>> pad1d = ms.mint.nn.ZeroPad1d(padding)
+         >>> out = pad1d(x)
+         >>> print(out)
+         [[[[0. 1. 1. 1. 1. 0.]
+            [0. 1. 1. 1. 1. 0.]
+            [0. 1. 1. 1. 1. 0.]]
+           [[0. 1. 1. 1. 1. 0.]
+            [0. 1. 1. 1. 1. 0.]
+            [0. 1. 1. 1. 1. 0.]]]]
+         >>> print(out.shape)
+         (1, 2, 3, 6)
+     """
+
+     def __init__(self, padding):
+         super(ZeroPad1d, self).__init__(padding, padding_length=2)
+
+
+ class ZeroPad2d(ZeroPadNd_):
+     """
+     Pad the last 2 dimensions of `input` tensor with 0 using `padding`.
+
+     For more information, please refer to :func:`mindspore.mint.nn.functional.pad`.
+
+     .. warning::
+         This is an experimental API that is subject to change or deletion.
+
+     Args:
+         padding (Union[int, tuple, list]): Specifies padding size.
+
+     Inputs:
+         - **input** (Tensor) - shape is :math:`(N, *)`, where :math:`*` means any number of additional dimensions.
+
+     Outputs:
+         Tensor, the tensor after padding.
+
+     Raises:
+         TypeError: If `padding` is not an integer or a list or tuple of 4 integers.
+         TypeError: If `input` is not Tensor.
+         ValueError: If `padding` contains a negative value.
+         ValueError: If `padding` is a tuple or list, and the length does not match the tensor dimension.
+
+     Supported Platforms:
+         ``Ascend``
+
+     Examples:
+         >>> import numpy as np
+         >>> import mindspore as ms
+         >>> x = np.ones(shape=(1, 2, 3, 4)).astype(np.float32)
+         >>> x = ms.Tensor(x)
+         >>> padding = (1, 1, 0, 1)
+         >>> pad = ms.mint.nn.ZeroPad2d(padding)
+         >>> out = pad(x)
+         >>> print(out)
+         [[[[0. 1. 1. 1. 1. 0.]
+            [0. 1. 1. 1. 1. 0.]
+            [0. 1. 1. 1. 1. 0.]
+            [0. 0. 0. 0. 0. 0.]]
+           [[0. 1. 1. 1. 1. 0.]
+            [0. 1. 1. 1. 1. 0.]
+            [0. 1. 1. 1. 1. 0.]
+            [0. 0. 0. 0. 0. 0.]]]]
+         >>> print(out.shape)
+         (1, 2, 4, 6)
+     """
+
+     def __init__(self, padding):
+         super(ZeroPad2d, self).__init__(padding, padding_length=4)
+
+
+ class ZeroPad3d(ZeroPadNd_):
+     """
+     Pad the last 3 dimensions of `input` tensor with 0 using `padding`.
+
+     For more information, please refer to :func:`mindspore.mint.nn.functional.pad`.
+
+     .. warning::
+         This is an experimental API that is subject to change or deletion.
+
+     Args:
+         padding (Union[int, tuple, list]): Specifies padding size.
+
+     Inputs:
+         - **input** (Tensor) - shape is :math:`(N, *)`, where :math:`*` means any number of additional dimensions.
+
+     Outputs:
+         Tensor, the tensor after padding.
+
+     Raises:
+         TypeError: If `padding` is not an integer or a list or tuple of 6 integers.
+         TypeError: If `input` is not Tensor.
+         ValueError: If `padding` contains a negative value.
+         ValueError: If `padding` is a tuple or list, and the length does not match the tensor dimension.
+
+     Supported Platforms:
+         ``Ascend``
+
+     Examples:
+         >>> import numpy as np
+         >>> import mindspore as ms
+         >>> x = np.ones(shape=(1, 2, 3, 4)).astype(np.float32)
+         >>> x = ms.Tensor(x)
+         >>> padding = (1, 1, 0, 1, 1, 0)
+         >>> pad3d = ms.mint.nn.ZeroPad3d(padding)
+         >>> out = pad3d(x)
+         >>> print(out)
+         [[[[0. 0. 0. 0. 0. 0.]
+            [0. 0. 0. 0. 0. 0.]
+            [0. 0. 0. 0. 0. 0.]
+            [0. 0. 0. 0. 0. 0.]]
+           [[0. 1. 1. 1. 1. 0.]
+            [0. 1. 1. 1. 1. 0.]
+            [0. 1. 1. 1. 1. 0.]
+            [0. 0. 0. 0. 0. 0.]]
+           [[0. 1. 1. 1. 1. 0.]
+            [0. 1. 1. 1. 1. 0.]
+            [0. 1. 1. 1. 1. 0.]
+            [0. 0. 0. 0. 0. 0.]]]]
+         >>> print(out.shape)
+         (1, 3, 4, 6)
+     """
+
+     def __init__(self, padding):
+         super(ZeroPad3d, self).__init__(padding, padding_length=6)
+
+
+ class ReflectionPadNd_(Cell):
+     """
+     Base class for N-dimensional reflection padding.
+     """
+     def __init__(self, padding, padding_length=None):
+         super(ReflectionPadNd_, self).__init__()
+         self.padding = padding
+
+         if isinstance(self.padding, int):
+             validator.check_positive_int(self.padding, "padding", self.cls_name)
+             self.padding = (self.padding,) * padding_length
+         elif isinstance(self.padding, (tuple, list)):
+             if len(padding) != padding_length:
+                 msg = f"For '{self.cls_name}', the length of parameter 'padding' with tuple type must " \
+                       f"equal to {padding_length}, but got {len(padding)}."
+                 raise ValueError(msg)
+             validator.check_non_negative_int_sequence(self.padding, "padding", self.cls_name)
+         else:
+             msg = f"For '{self.cls_name}', 'padding' must be positive integer or tuple/list of {padding_length}" \
+                   f" positive integers, but got {padding}."
+             raise ValueError(msg)
+
+     def construct(self, input):
+         return mint.nn.functional.pad(input, self.padding, mode='reflect')
+
+
+ class ReflectionPad1d(ReflectionPadNd_):
+     """
+     Pad the last dimension of `input` tensor using the reflection of the input boundary.
+
+     For more information, please refer to :func:`mindspore.mint.nn.functional.pad`.
+
+     .. warning::
+         This is an experimental API that is subject to change or deletion.
+
+     Args:
+         padding (Union[int, tuple, list]): Specifies padding size.
+
+     Inputs:
+         - **input** (Tensor) - 2D or 3D input Tensor with shape: :math:`(C, W_{in})` or :math:`(N, C, W_{in})`.
+
+     Outputs:
+         Tensor, the tensor after padding.
+
+     Raises:
+         TypeError: If `padding` is not an integer or a list or tuple of 2 integers.
+         TypeError: If `input` is not Tensor.
+         ValueError: If `padding` contains a negative value.
+         ValueError: If `padding` is a tuple or list, and the length does not match the tensor dimension.
+
+     Supported Platforms:
+         ``Ascend``
+
+     Examples:
+         >>> import numpy as np
+         >>> import mindspore as ms
+         >>> x = ms.Tensor(np.array([[[0, 1, 2, 3], [4, 5, 6, 7]]]).astype(np.float32))
+         >>> # x has shape (1, 2, 4)
+         >>> padding = (3, 1)
+         >>> # The first and the second dimension of x remain the same.
+         >>> # The third dimension of x: W_out = W_in + pad_left + pad_right = 4 + 3 + 1 = 8
+         >>> pad1d = ms.mint.nn.ReflectionPad1d(padding)
+         >>> out = pad1d(x)
+         >>> # The shape of out is (1, 2, 8)
+         >>> print(out)
+         [[[3. 2. 1. 0. 1. 2. 3. 2.]
+           [7. 6. 5. 4. 5. 6. 7. 6.]]]
+     """
+
+     def __init__(self, padding):
+         super(ReflectionPad1d, self).__init__(padding, padding_length=2)
+
+
+ class ReflectionPad2d(ReflectionPadNd_):
+     """
+     Pad the last 2 dimensions of `input` tensor using the reflection of the input boundary.
+
+     For more information, please refer to :func:`mindspore.mint.nn.functional.pad`.
+
+     .. warning::
+         This is an experimental API that is subject to change or deletion.
+
+     Args:
+         padding (Union[int, tuple, list]): Specifies padding size.
+
+     Inputs:
+         - **input** (Tensor) - 3D or 4D input Tensor with shape: :math:`(C, H_{in}, W_{in})`
+           or :math:`(N, C, H_{in}, W_{in})`.
+
+     Outputs:
+         Tensor, the tensor after padding.
+
+     Raises:
+         TypeError: If `padding` is not an integer or a list or tuple of 4 integers.
+         TypeError: If `input` is not Tensor.
+         ValueError: If `padding` contains a negative value.
+         ValueError: If `padding` is a tuple or list, and the length does not match the tensor dimension.
+
+     Supported Platforms:
+         ``Ascend``
+
+     Examples:
+         >>> import numpy as np
+         >>> import mindspore as ms
+         >>> x = ms.Tensor(np.array([[[0, 1, 2], [3, 4, 5], [6, 7, 8]]]).astype(np.float32))
+         >>> # x has shape (1, 3, 3)
+         >>> padding = (1, 1, 2, 0)
+         >>> pad2d = ms.mint.nn.ReflectionPad2d(padding)
+         >>> # The first dimension of x remains the same.
+         >>> # The second dimension of x: H_out = H_in + pad_up + pad_down = 3 + 2 + 0 = 5
+         >>> # The third dimension of x: W_out = W_in + pad_left + pad_right = 3 + 1 + 1 = 5
+         >>> out = pad2d(x)
+         >>> # The shape of out is (1, 5, 5)
+         >>> print(out)
+         [[[7. 6. 7. 8. 7.]
+           [4. 3. 4. 5. 4.]
+           [1. 0. 1. 2. 1.]
+           [4. 3. 4. 5. 4.]
+           [7. 6. 7. 8. 7.]]]
+     """
+
+     def __init__(self, padding):
+         super(ReflectionPad2d, self).__init__(padding, padding_length=4)
+
+
+ class ReflectionPad3d(ReflectionPadNd_):
+     """
+     Pad the last 3 dimensions of `input` tensor using the reflection of the input boundary.
+
+     For more information, please refer to :func:`mindspore.mint.nn.functional.pad`.
+
+     .. warning::
+         This is an experimental API that is subject to change or deletion.
+
+     Args:
+         padding (Union[int, tuple, list]): Specifies padding size.
+
+     Inputs:
+         - **input** (Tensor) - 4D or 5D input Tensor with shape: :math:`(N, D_{in}, H_{in}, W_{in})`
+           or :math:`(N, C, D_{in}, H_{in}, W_{in})`.
+
+     Outputs:
+         Tensor, the tensor after padding.
+
+     Raises:
+         TypeError: If `padding` is not an integer or a list or tuple of 6 integers.
+         TypeError: If `input` is not Tensor.
+         ValueError: If `padding` contains a negative value.
+         ValueError: If `padding` is a tuple or list, and the length does not match the tensor dimension.
+
+     Supported Platforms:
+         ``Ascend``
+
+     Examples:
+         >>> import numpy as np
+         >>> import mindspore as ms
+         >>> arr = np.arange(8).astype(np.float32).reshape((1, 2, 2, 2))
+         >>> x = ms.Tensor(arr)
+         >>> # x has shape (1, 2, 2, 2)
+         >>> padding = (1, 1, 1, 0, 0, 1)
+         >>> pad3d = ms.mint.nn.ReflectionPad3d(padding)
+         >>> out = pad3d(x)
+         >>> # The first dimension of x remains the same.
+         >>> # The second dimension of x: D_out = D_in + pad_front + pad_back = 2 + 0 + 1 = 3
+         >>> # The third dimension of x: H_out = H_in + pad_up + pad_down = 2 + 1 + 0 = 3
+         >>> # The last dimension of x: W_out = W_in + pad_left + pad_right = 2 + 1 + 1 = 4
+         >>> # The shape of out is (1, 3, 3, 4)
+         >>> print(out)
+         [[[[3. 2. 3. 2.]
+            [1. 0. 1. 0.]
+            [3. 2. 3. 2.]]
+           [[7. 6. 7. 6.]
+            [5. 4. 5. 4.]
+            [7. 6. 7. 6.]]
+           [[3. 2. 3. 2.]
+            [1. 0. 1. 0.]
+            [3. 2. 3. 2.]]]]
+     """
+
+     def __init__(self, padding):
+         super(ReflectionPad3d, self).__init__(padding, padding_length=6)
+
+
+ class ReplicationPadNd_(Cell):
+     """
+     Base class for N-dimensional replication padding.
+     """
+     def __init__(self, padding, padding_length=None):
+         super(ReplicationPadNd_, self).__init__()
+         self.padding = padding
+
+         if isinstance(self.padding, int):
+             validator.check_positive_int(self.padding, "padding", self.cls_name)
+             self.padding = (self.padding,) * padding_length
+         elif isinstance(self.padding, (tuple, list)):
+             if len(padding) != padding_length:
+                 msg = f"For '{self.cls_name}', the length of parameter 'padding' with tuple type must " \
+                       f"equal to {padding_length}, but got {len(padding)}."
+                 raise ValueError(msg)
+             validator.check_non_negative_int_sequence(self.padding, "padding", self.cls_name)
+         else:
+             msg = f"For '{self.cls_name}', 'padding' must be positive integer or tuple/list of {padding_length} " \
+                   f"positive integers, but got {padding}."
+             raise ValueError(msg)
+
+     def construct(self, input):
+         return mint.nn.functional.pad(input, self.padding, mode='replicate')
+
+
+ class ReplicationPad1d(ReplicationPadNd_):
+     """
+     Pad the last dimension of `input` tensor using the replication of the input boundary.
+
+     For more information, please refer to :func:`mindspore.mint.nn.functional.pad`.
+
+     .. warning::
+         This is an experimental API that is subject to change or deletion.
+
+     Args:
+         padding (Union[int, tuple, list]): Specifies padding size.
+
+     Inputs:
+         - **input** (Tensor) - 2D or 3D input Tensor with shape: :math:`(C, W_{in})` or :math:`(N, C, W_{in})`.
+
+     Outputs:
+         Tensor, the tensor after padding.
+
+     Raises:
+         TypeError: If `padding` is not an integer or a list or tuple of 2 integers.
+         TypeError: If `input` is not Tensor.
+         ValueError: If `padding` is a tuple or list, and the length does not match the tensor dimension.
+
+     Supported Platforms:
+         ``Ascend``
+
+     Examples:
+         >>> import numpy as np
+         >>> import mindspore as ms
+         >>> pad1d = ms.mint.nn.ReplicationPad1d(2)
+         >>> input = ms.Tensor(np.arange(0, 8).reshape(1, 2, 4), ms.float32)
+         >>> print(input)
+         [[[0. 1. 2. 3.]
+           [4. 5. 6. 7.]]]
+         >>> out = pad1d(input)
+         >>> print(out)
+         [[[0. 0. 0. 1. 2. 3. 3. 3.]
+           [4. 4. 4. 5. 6. 7. 7. 7.]]]
+         >>> pad1d = ms.mint.nn.ReplicationPad1d((3, 1))
+         >>> out = pad1d(input)
+         >>> print(out)
+         [[[0. 0. 0. 0. 1. 2. 3. 3.]
+           [4. 4. 4. 4. 5. 6. 7. 7.]]]
+     """
+
+     def __init__(self, padding):
+         super(ReplicationPad1d, self).__init__(padding, padding_length=2)
+
+
+ class ReplicationPad2d(ReplicationPadNd_):
+     """
+     Pad the last 2 dimensions of `input` tensor using the replication of the input boundary.
+
+     For more information, please refer to :func:`mindspore.mint.nn.functional.pad`.
+
+     .. warning::
+         This is an experimental API that is subject to change or deletion.
+
+     Args:
+         padding (Union[int, tuple, list]): Specifies padding size.
+
+     Inputs:
+         - **input** (Tensor) - 3D or 4D input Tensor with shape: :math:`(C, H_{in}, W_{in})`
+           or :math:`(N, C, H_{in}, W_{in})`.
+
+     Outputs:
+         Tensor, the tensor after padding.
+
+     Raises:
+         TypeError: If `padding` is not an integer or a list or tuple of 4 integers.
+         TypeError: If `input` is not Tensor.
+         ValueError: If `padding` is a tuple or list, and the length does not match the tensor dimension.
+
+     Supported Platforms:
+         ``Ascend``
+
+     Examples:
+         >>> import numpy as np
+         >>> import mindspore as ms
+         >>> pad2d = ms.mint.nn.ReplicationPad2d(2)
+         >>> input = ms.Tensor(np.arange(0, 9).reshape(1, 1, 3, 3), ms.float32)
+         >>> print(input)
+         [[[[0. 1. 2.]
+            [3. 4. 5.]
+            [6. 7. 8.]]]]
+         >>> out = pad2d(input)
+         >>> print(out)
+         [[[[0. 0. 0. 1. 2. 2. 2.]
+            [0. 0. 0. 1. 2. 2. 2.]
+            [0. 0. 0. 1. 2. 2. 2.]
+            [3. 3. 3. 4. 5. 5. 5.]
+            [6. 6. 6. 7. 8. 8. 8.]
+            [6. 6. 6. 7. 8. 8. 8.]
+            [6. 6. 6. 7. 8. 8. 8.]]]]
+         >>> pad2d = ms.mint.nn.ReplicationPad2d((1, 1, 2, 0))
+         >>> out = pad2d(input)
+         >>> print(out)
+         [[[[0. 0. 1. 2. 2.]
+            [0. 0. 1. 2. 2.]
+            [0. 0. 1. 2. 2.]
+            [3. 3. 4. 5. 5.]
+            [6. 6. 7. 8. 8.]]]]
+     """
+
+     def __init__(self, padding):
+         super(ReplicationPad2d, self).__init__(padding, padding_length=4)
+
+
+ class ReplicationPad3d(ReplicationPadNd_):
+     """
+     Pad the last 3 dimensions of `input` tensor using the replication of the input boundary.
+
+     For more information, please refer to :func:`mindspore.mint.nn.functional.pad`.
+
+     .. warning::
+         This is an experimental API that is subject to change or deletion.
+
+     Args:
+         padding (Union[int, tuple, list]): Specifies padding size.
+
+     Inputs:
+         - **input** (Tensor) - 4D or 5D input Tensor with shape: :math:`(N, D_{in}, H_{in}, W_{in})` or
+           :math:`(N, C, D_{in}, H_{in}, W_{in})`.
+
+     Outputs:
+         Tensor, the tensor after padding.
+
+     Raises:
+         TypeError: If `padding` is not an integer or a list or tuple of 6 integers.
+         TypeError: If `input` is not Tensor.
+         ValueError: If `padding` is a tuple or list, and the length does not match the tensor dimension.
+
+     Supported Platforms:
+         ``Ascend``
+
+     Examples:
+         >>> import numpy as np
+         >>> import mindspore as ms
+         >>> pad3d = ms.mint.nn.ReplicationPad3d(1)
+         >>> input = ms.Tensor(np.arange(0, 9).reshape(1, 1, 1, 3, 3), ms.float32)
+         >>> out = pad3d(input)
+         >>> print(out)
+         [[[[[0. 0. 1. 2. 2.]
+             [0. 0. 1. 2. 2.]
+             [3. 3. 4. 5. 5.]
+             [6. 6. 7. 8. 8.]
+             [6. 6. 7. 8. 8.]]
+            [[0. 0. 1. 2. 2.]
+             [0. 0. 1. 2. 2.]
+             [3. 3. 4. 5. 5.]
+             [6. 6. 7. 8. 8.]
+             [6. 6. 7. 8. 8.]]
+            [[0. 0. 1. 2. 2.]
+             [0. 0. 1. 2. 2.]
+             [3. 3. 4. 5. 5.]
+             [6. 6. 7. 8. 8.]
+             [6. 6. 7. 8. 8.]]]]]
+     """
+
+     def __init__(self, padding):
+         super(ReplicationPad3d, self).__init__(padding, padding_length=6)
+
+
+ __all__ = [
+     'ConstantPad1d',
+     'ConstantPad2d',
+     'ConstantPad3d',
+     'ZeroPad1d',
+     'ZeroPad2d',
+     'ZeroPad3d',
+     'ReflectionPad1d',
+     'ReflectionPad2d',
+     'ReflectionPad3d',
+     'ReplicationPad1d',
+     'ReplicationPad2d',
+     'ReplicationPad3d',
+ ]
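
For quick reference, the classes exported in __all__ above are reachable as mindspore.mint.nn.<ClassName>. The sketch below simply mirrors the docstring examples in this new file; it assumes MindSpore 2.5.0 with an Ascend backend (the only platform listed under Supported Platforms) and checks output shapes only.

    # Sketch: exercise a few of the new mint padding layers (mirrors the docstrings above).
    import numpy as np
    import mindspore as ms

    x = ms.Tensor(np.ones((1, 2, 3, 4), dtype=np.float32))
    print(ms.mint.nn.ConstantPad1d((0, 1), 0.5)(x).shape)   # (1, 2, 3, 5)
    print(ms.mint.nn.ZeroPad2d((1, 1, 0, 1))(x).shape)      # (1, 2, 4, 6)

    y = ms.Tensor(np.arange(0, 8).reshape(1, 2, 4), ms.float32)
    print(ms.mint.nn.ReplicationPad1d((3, 1))(y).shape)     # (1, 2, 8)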