mindspore-2.6.0-cp310-cp310-win_amd64.whl → mindspore-2.7.0-cp310-cp310-win_amd64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of mindspore might be problematic.
- mindspore/.commit_id +1 -1
- mindspore/Microsoft.VisualStudio.Telemetry.dll +0 -0
- mindspore/Newtonsoft.Json.dll +0 -0
- mindspore/__init__.py +2 -2
- mindspore/_c_dataengine.cp310-win_amd64.pyd +0 -0
- mindspore/_c_expression.cp310-win_amd64.pyd +0 -0
- mindspore/_c_mindrecord.cp310-win_amd64.pyd +0 -0
- mindspore/_checkparam.py +42 -11
- mindspore/_extends/builtin_operations.py +3 -3
- mindspore/{_deprecated → _extends/optimize}/__init__.py +9 -3
- mindspore/_extends/optimize/cell_utils.py +96 -0
- mindspore/_extends/parallel_compile/akg_compiler/custom.py +1109 -0
- mindspore/_extends/parallel_compile/akg_compiler/gen_custom_op_files.py +1 -1
- mindspore/_extends/parse/__init__.py +3 -3
- mindspore/_extends/parse/compile_config.py +44 -22
- mindspore/_extends/parse/deprecated/deprecated_tensor_method.py +1 -2
- mindspore/_extends/parse/parser.py +64 -83
- mindspore/_extends/parse/resources.py +39 -0
- mindspore/_extends/parse/standard_method.py +47 -14
- mindspore/_extends/parse/trope.py +8 -1
- mindspore/_extends/pijit/__init__.py +1 -2
- mindspore/_extends/pijit/pijit_func_white_list.py +2 -5
- mindspore/amp.py +4 -22
- mindspore/atlprov.dll +0 -0
- mindspore/avcodec-59.dll +0 -0
- mindspore/avdevice-59.dll +0 -0
- mindspore/avfilter-8.dll +0 -0
- mindspore/avformat-59.dll +0 -0
- mindspore/avutil-57.dll +0 -0
- mindspore/boost/adasum.py +1 -1
- mindspore/boost/boost_cell_wrapper.py +4 -4
- mindspore/c1.dll +0 -0
- mindspore/c1xx.dll +0 -0
- mindspore/c2.dll +0 -0
- mindspore/common/__init__.py +43 -12
- mindspore/common/_grad_function.py +2 -1
- mindspore/common/_pijit_context.py +28 -7
- mindspore/common/_stub_tensor.py +1 -209
- mindspore/common/_tensor_cpp_method.py +1 -1
- mindspore/common/_tensor_docs.py +177 -52
- mindspore/common/_utils.py +9 -1
- mindspore/common/api.py +338 -208
- mindspore/common/dtype.py +108 -57
- mindspore/common/dump.py +11 -16
- mindspore/common/dynamic_shape/__init__.py +0 -0
- mindspore/common/{auto_dynamic_shape.py → dynamic_shape/auto_dynamic_shape.py} +17 -23
- mindspore/common/dynamic_shape/enable_dynamic.py +197 -0
- mindspore/common/file_system.py +59 -9
- mindspore/common/generator.py +2 -3
- mindspore/common/hook_handle.py +33 -5
- mindspore/common/jit_config.py +1 -1
- mindspore/common/jit_trace.py +84 -105
- mindspore/common/np_dtype.py +3 -3
- mindspore/common/parameter.py +27 -29
- mindspore/common/recompute.py +5 -7
- mindspore/common/sparse_tensor.py +0 -3
- mindspore/common/symbol.py +0 -1
- mindspore/common/tensor.py +84 -133
- mindspore/communication/_comm_helper.py +46 -4
- mindspore/communication/management.py +79 -7
- mindspore/context.py +47 -38
- mindspore/dataset/__init__.py +1 -1
- mindspore/dataset/audio/transforms.py +1 -1
- mindspore/dataset/core/config.py +38 -4
- mindspore/dataset/engine/datasets.py +350 -322
- mindspore/dataset/engine/datasets_user_defined.py +69 -23
- mindspore/dataset/engine/iterators.py +2 -2
- mindspore/dataset/engine/obs/config_loader.py +2 -2
- mindspore/dataset/engine/obs/obs_mindrecord_dataset.py +8 -0
- mindspore/dataset/transforms/c_transforms.py +2 -2
- mindspore/dataset/transforms/py_transforms.py +7 -3
- mindspore/dataset/transforms/transforms.py +10 -6
- mindspore/dataset/vision/__init__.py +1 -1
- mindspore/dataset/vision/py_transforms.py +8 -8
- mindspore/dataset/vision/transforms.py +17 -5
- mindspore/dataset/vision/utils.py +632 -21
- mindspore/dataset/vision/validators.py +1 -0
- mindspore/device_context/ascend/device.py +1 -1
- mindspore/device_context/ascend/op_tuning.py +35 -1
- mindspore/device_context/gpu/__init__.py +2 -2
- mindspore/device_context/gpu/device.py +1 -1
- mindspore/device_context/gpu/op_precision.py +4 -2
- mindspore/device_context/gpu/op_tuning.py +6 -3
- mindspore/device_manager.py +16 -9
- mindspore/dnnl.dll +0 -0
- mindspore/dpcmi.dll +0 -0
- mindspore/experimental/llm_boost/ascend_native/llama_boost_ascend_native.py +5 -4
- mindspore/experimental/llm_boost/atb/boost_base.py +2 -3
- mindspore/experimental/optim/adadelta.py +13 -20
- mindspore/experimental/optim/adagrad.py +15 -22
- mindspore/experimental/optim/adam.py +17 -24
- mindspore/experimental/optim/adamax.py +14 -22
- mindspore/experimental/optim/adamw.py +28 -34
- mindspore/experimental/optim/asgd.py +15 -25
- mindspore/experimental/optim/lr_scheduler.py +27 -45
- mindspore/experimental/optim/nadam.py +14 -24
- mindspore/experimental/optim/optimizer.py +13 -23
- mindspore/experimental/optim/radam.py +18 -24
- mindspore/experimental/optim/rmsprop.py +14 -25
- mindspore/experimental/optim/rprop.py +15 -26
- mindspore/experimental/optim/sgd.py +9 -19
- mindspore/hal/__init__.py +4 -4
- mindspore/hal/contiguous_tensors_handle.py +2 -2
- mindspore/hal/memory.py +1 -0
- mindspore/include/api/cell.h +65 -5
- mindspore/include/api/cfg.h +24 -7
- mindspore/include/api/context.h +1 -0
- mindspore/include/api/delegate.h +10 -2
- mindspore/include/api/dual_abi_helper.h +100 -19
- mindspore/include/api/graph.h +14 -1
- mindspore/include/api/kernel.h +16 -3
- mindspore/include/api/kernel_api.h +9 -1
- mindspore/include/api/metrics/accuracy.h +9 -0
- mindspore/include/api/model.h +8 -1
- mindspore/include/api/model_group.h +4 -0
- mindspore/include/api/model_parallel_runner.h +2 -0
- mindspore/include/api/status.h +48 -10
- mindspore/include/api/types.h +8 -3
- mindspore/include/c_api/model_c.h +0 -58
- mindspore/include/c_api/tensor_c.h +0 -26
- mindspore/include/dataset/constants.h +9 -0
- mindspore/include/dataset/vision_ascend.h +1 -1
- mindspore/jpeg62.dll +0 -0
- mindspore/mindrecord/tools/cifar10.py +61 -11
- mindspore/mindrecord/tools/cifar10_to_mr.py +5 -0
- mindspore/mindspore_backend_common.dll +0 -0
- mindspore/mindspore_backend_manager.dll +0 -0
- mindspore/mindspore_common.dll +0 -0
- mindspore/mindspore_core.dll +0 -0
- mindspore/mindspore_cpu_res_manager.dll +0 -0
- mindspore/mindspore_dump.dll +0 -0
- mindspore/mindspore_frontend.dll +0 -0
- mindspore/mindspore_glog.dll +0 -0
- mindspore/mindspore_memory_pool.dll +0 -0
- mindspore/mindspore_ms_backend.dll +0 -0
- mindspore/mindspore_ops.dll +0 -0
- mindspore/mindspore_ops_host.dll +0 -0
- mindspore/mindspore_ops_kernel_common.dll +0 -0
- mindspore/mindspore_profiler.dll +0 -0
- mindspore/mindspore_pyboost.dll +0 -0
- mindspore/mindspore_pynative.dll +0 -0
- mindspore/mindspore_res_manager.dll +0 -0
- mindspore/mindspore_runtime_pipeline.dll +0 -0
- mindspore/mint/__init__.py +4 -44
- mindspore/mint/distributed/__init__.py +5 -0
- mindspore/mint/distributed/distributed.py +425 -19
- mindspore/mint/nn/__init__.py +1 -1
- mindspore/mint/nn/functional.py +53 -6
- mindspore/mint/nn/layer/_functions.py +163 -294
- mindspore/mint/nn/layer/activation.py +8 -6
- mindspore/mint/nn/layer/conv.py +125 -101
- mindspore/mint/nn/layer/normalization.py +11 -25
- mindspore/mint/optim/adam.py +19 -18
- mindspore/mint/optim/adamw.py +14 -8
- mindspore/mint/optim/sgd.py +5 -5
- mindspore/msobj140.dll +0 -0
- mindspore/mspdb140.dll +0 -0
- mindspore/mspdbcore.dll +0 -0
- mindspore/mspdbst.dll +0 -0
- mindspore/mspft140.dll +0 -0
- mindspore/msvcdis140.dll +0 -0
- mindspore/msvcp140_1.dll +0 -0
- mindspore/msvcp140_2.dll +0 -0
- mindspore/msvcp140_atomic_wait.dll +0 -0
- mindspore/msvcp140_codecvt_ids.dll +0 -0
- mindspore/nn/cell.py +488 -620
- mindspore/nn/grad/cell_grad.py +11 -12
- mindspore/nn/layer/activation.py +36 -36
- mindspore/nn/layer/basic.py +74 -77
- mindspore/nn/layer/channel_shuffle.py +4 -4
- mindspore/nn/layer/combined.py +4 -2
- mindspore/nn/layer/conv.py +86 -85
- mindspore/nn/layer/dense.py +9 -7
- mindspore/nn/layer/embedding.py +50 -52
- mindspore/nn/layer/image.py +38 -40
- mindspore/nn/layer/math.py +111 -112
- mindspore/nn/layer/normalization.py +56 -44
- mindspore/nn/layer/pooling.py +58 -63
- mindspore/nn/layer/rnn_cells.py +33 -33
- mindspore/nn/layer/rnns.py +56 -56
- mindspore/nn/layer/thor_layer.py +74 -73
- mindspore/nn/layer/transformer.py +11 -1
- mindspore/nn/learning_rate_schedule.py +20 -20
- mindspore/nn/loss/loss.py +79 -81
- mindspore/nn/optim/adam.py +2 -4
- mindspore/nn/optim/adasum.py +2 -2
- mindspore/nn/optim/lamb.py +1 -3
- mindspore/nn/optim/optimizer.py +1 -1
- mindspore/nn/optim/tft_wrapper.py +2 -3
- mindspore/nn/optim/thor.py +2 -2
- mindspore/nn/probability/distribution/_utils/utils.py +2 -2
- mindspore/nn/probability/distribution/exponential.py +2 -1
- mindspore/nn/probability/distribution/poisson.py +2 -1
- mindspore/nn/sparse/sparse.py +3 -3
- mindspore/nn/wrap/cell_wrapper.py +73 -42
- mindspore/nn/wrap/grad_reducer.py +37 -52
- mindspore/nn/wrap/loss_scale.py +72 -74
- mindspore/numpy/array_creations.py +7 -7
- mindspore/numpy/fft.py +1 -1
- mindspore/numpy/math_ops.py +1 -1
- mindspore/numpy/utils_const.py +1 -1
- mindspore/opencv_core452.dll +0 -0
- mindspore/opencv_imgcodecs452.dll +0 -0
- mindspore/opencv_imgproc452.dll +0 -0
- mindspore/ops/_grad_experimental/grad_comm_ops.py +51 -13
- mindspore/ops/_grad_experimental/grad_debug_ops.py +14 -0
- mindspore/ops/_grad_experimental/grad_inner_ops.py +0 -9
- mindspore/ops/_op_impl/cpu/__init__.py +1 -0
- mindspore/{experimental/es/__init__.py → ops/_op_impl/cpu/joinedstr_op.py} +12 -6
- mindspore/ops/_vmap/vmap_array_ops.py +6 -13
- mindspore/ops/_vmap/vmap_nn_ops.py +8 -16
- mindspore/ops/auto_generate/cpp_create_prim_instance_helper.py +29 -10
- mindspore/ops/auto_generate/gen_extend_func.py +5 -55
- mindspore/ops/auto_generate/gen_ops_def.py +753 -273
- mindspore/ops/auto_generate/gen_ops_prim.py +1687 -958
- mindspore/ops/auto_generate/pyboost_inner_prim.py +31 -1
- mindspore/ops/composite/__init__.py +10 -0
- mindspore/ops/composite/base.py +9 -5
- mindspore/ops/composite/multitype_ops/__init__.py +12 -1
- mindspore/ops/composite/multitype_ops/_compile_utils.py +132 -108
- mindspore/ops/composite/multitype_ops/_constexpr_utils.py +1 -1
- mindspore/ops/composite/multitype_ops/add_impl.py +70 -2
- mindspore/ops/composite/multitype_ops/div_impl.py +49 -0
- mindspore/ops/composite/multitype_ops/floordiv_impl.py +29 -0
- mindspore/ops/composite/multitype_ops/getitem_impl.py +11 -0
- mindspore/ops/composite/multitype_ops/mod_impl.py +5 -3
- mindspore/ops/composite/multitype_ops/mul_impl.py +49 -0
- mindspore/ops/composite/multitype_ops/setitem_impl.py +57 -0
- mindspore/ops/composite/multitype_ops/sub_impl.py +34 -0
- mindspore/ops/composite/multitype_ops/zeros_like_impl.py +14 -0
- mindspore/ops/function/__init__.py +4 -1
- mindspore/ops/function/_add_attr_func.py +11 -6
- mindspore/ops/function/array_func.py +17 -100
- mindspore/ops/function/debug_func.py +8 -5
- mindspore/ops/function/grad/grad_func.py +5 -13
- mindspore/ops/function/math_func.py +65 -399
- mindspore/ops/function/nn_func.py +44 -61
- mindspore/ops/function/other_func.py +4 -1
- mindspore/ops/function/random_func.py +31 -4
- mindspore/ops/functional.py +2 -3
- mindspore/ops/functional_overload.py +486 -18
- mindspore/ops/op_info_register.py +21 -0
- mindspore/ops/operations/__init__.py +5 -2
- mindspore/ops/operations/_custom_ops_utils.py +675 -8
- mindspore/ops/operations/_inner_ops.py +14 -18
- mindspore/ops/operations/_sequence_ops.py +1 -1
- mindspore/ops/operations/array_ops.py +4 -50
- mindspore/ops/operations/comm_ops.py +186 -41
- mindspore/ops/operations/custom_ops.py +244 -175
- mindspore/ops/operations/debug_ops.py +55 -4
- mindspore/ops/operations/image_ops.py +13 -13
- mindspore/ops/operations/manually_defined/ops_def.py +27 -28
- mindspore/ops/operations/math_ops.py +8 -9
- mindspore/ops/operations/nn_ops.py +6 -7
- mindspore/ops/primitive.py +9 -20
- mindspore/ops/tensor_method.py +52 -11
- mindspore/ops_generate/api/cpp_create_prim_instance_helper_generator.py +1 -1
- mindspore/ops_generate/api/functional_map_cpp_generator.py +10 -9
- mindspore/ops_generate/api/functions_cc_generator.py +58 -10
- mindspore/ops_generate/api/tensor_func_reg_cpp_generator.py +1 -1
- mindspore/ops_generate/common/base_generator.py +14 -0
- mindspore/ops_generate/common/gen_constants.py +7 -2
- mindspore/ops_generate/common/gen_utils.py +0 -19
- mindspore/ops_generate/common/op_proto.py +11 -4
- mindspore/ops_generate/common/template.py +88 -11
- mindspore/ops_generate/gen_ops.py +1 -1
- mindspore/ops_generate/op_def/lite_ops_cpp_generator.py +4 -4
- mindspore/ops_generate/op_def/ops_name_h_generator.py +0 -3
- mindspore/ops_generate/op_def/ops_primitive_h_generator.py +0 -4
- mindspore/ops_generate/op_def_py/op_prim_py_generator.py +5 -2
- mindspore/ops_generate/pyboost/auto_grad_impl_cc_generator.py +49 -8
- mindspore/ops_generate/pyboost/auto_grad_reg_cc_generator.py +2 -2
- mindspore/ops_generate/pyboost/gen_pyboost_func.py +31 -16
- mindspore/ops_generate/pyboost/op_template_parser.py +98 -72
- mindspore/ops_generate/pyboost/pyboost_functions_cpp_generator.py +70 -273
- mindspore/ops_generate/pyboost/pyboost_functions_h_generator.py +14 -6
- mindspore/ops_generate/pyboost/pyboost_functions_impl_cpp_generator.py +316 -0
- mindspore/ops_generate/pyboost/pyboost_functions_py_generator.py +1 -1
- mindspore/ops_generate/pyboost/pyboost_grad_function_cpp_generator.py +5 -3
- mindspore/ops_generate/pyboost/pyboost_inner_prim_generator.py +1 -1
- mindspore/ops_generate/pyboost/pyboost_internal_functions_cpp_generator.py +76 -0
- mindspore/ops_generate/pyboost/pyboost_internal_functions_h_generator.py +76 -0
- mindspore/ops_generate/pyboost/pyboost_internal_kernel_info_adapter_generator.py +125 -0
- mindspore/ops_generate/pyboost/pyboost_native_grad_functions_generator.py +4 -3
- mindspore/ops_generate/pyboost/pyboost_op_cpp_code_generator.py +348 -61
- mindspore/ops_generate/pyboost/pyboost_overload_functions_cpp_generator.py +1 -1
- mindspore/ops_generate/pyboost/pyboost_utils.py +118 -9
- mindspore/ops_generate/tensor_py_cc_generator.py +1 -24
- mindspore/parallel/_auto_parallel_context.py +9 -17
- mindspore/parallel/_cell_wrapper.py +106 -40
- mindspore/parallel/_parallel_serialization.py +4 -3
- mindspore/parallel/_ps_context.py +4 -6
- mindspore/parallel/_tensor.py +167 -12
- mindspore/parallel/_transformer/moe.py +1 -1
- mindspore/parallel/_transformer/transformer.py +17 -12
- mindspore/parallel/_utils.py +5 -11
- mindspore/parallel/auto_parallel.py +33 -12
- mindspore/parallel/checkpoint_convert.py +3 -3
- mindspore/parallel/checkpoint_transform.py +5 -1
- mindspore/parallel/cluster/process_entity/_api.py +88 -49
- mindspore/parallel/cluster/process_entity/_utils.py +95 -7
- mindspore/parallel/cluster/run.py +48 -7
- mindspore/parallel/function/__init__.py +8 -1
- mindspore/parallel/function/reshard_func.py +7 -6
- mindspore/parallel/nn/__init__.py +15 -2
- mindspore/parallel/nn/parallel_cell_wrapper.py +50 -14
- mindspore/parallel/nn/parallel_grad_reducer.py +7 -14
- mindspore/parallel/shard.py +9 -23
- mindspore/parallel/transform_safetensors.py +468 -174
- mindspore/pgodb140.dll +0 -0
- mindspore/pgort140.dll +0 -0
- mindspore/profiler/__init__.py +2 -1
- mindspore/profiler/analysis/parser/timeline_assembly_factory/ascend_timeline_assembler.py +7 -7
- mindspore/profiler/analysis/parser/timeline_assembly_factory/base_timeline_assembler.py +3 -0
- mindspore/profiler/analysis/parser/timeline_assembly_factory/trace_view_container.py +3 -0
- mindspore/profiler/analysis/parser/timeline_creator/cpu_op_timeline_creator.py +3 -3
- mindspore/profiler/analysis/parser/timeline_creator/fwk_timeline_creator.py +3 -3
- mindspore/profiler/analysis/parser/timeline_creator/msprof_timeline_creator.py +4 -4
- mindspore/profiler/analysis/parser/timeline_creator/scope_layer_timeline_creator.py +3 -3
- mindspore/profiler/analysis/parser/timeline_event/fwk_event.py +4 -1
- mindspore/profiler/analysis/parser/timeline_event/timeline_event_pool.py +2 -1
- mindspore/profiler/analysis/task_manager.py +1 -1
- mindspore/profiler/analysis/viewer/ascend_communication_viewer.py +5 -1
- mindspore/profiler/analysis/viewer/ascend_integrate_viewer.py +2 -1
- mindspore/profiler/analysis/viewer/ascend_kernel_details_viewer.py +10 -9
- mindspore/profiler/analysis/viewer/ascend_op_memory_viewer.py +43 -23
- mindspore/profiler/analysis/viewer/ascend_step_trace_time_viewer.py +3 -2
- mindspore/profiler/analysis/viewer/ms_minddata_viewer.py +9 -5
- mindspore/profiler/analysis/viewer/ms_operator_details_viewer.py +132 -0
- mindspore/profiler/common/constant.py +16 -0
- mindspore/profiler/common/msprof_cmd_tool.py +2 -2
- mindspore/profiler/common/path_manager.py +9 -0
- mindspore/profiler/common/profiler_context.py +50 -29
- mindspore/profiler/common/profiler_info.py +0 -16
- mindspore/profiler/common/profiler_meta_data.py +1 -0
- mindspore/profiler/common/profiler_op_analyse.py +239 -0
- mindspore/profiler/common/profiler_output_path.py +23 -8
- mindspore/profiler/common/profiler_parameters.py +128 -35
- mindspore/profiler/dynamic_profile/__init__.py +0 -0
- mindspore/profiler/dynamic_profile/dynamic_monitor_proxy.py +39 -0
- mindspore/profiler/dynamic_profile/dynamic_profiler_config_context.py +666 -0
- mindspore/profiler/dynamic_profile/dynamic_profiler_utils.py +62 -0
- mindspore/profiler/dynamic_profiler.py +374 -338
- mindspore/profiler/envprofiler.py +42 -12
- mindspore/profiler/experimental_config.py +112 -7
- mindspore/profiler/mstx.py +33 -12
- mindspore/profiler/platform/__init__.py +2 -3
- mindspore/profiler/platform/cpu_profiler.py +10 -4
- mindspore/profiler/platform/npu_profiler.py +30 -20
- mindspore/profiler/profiler.py +218 -154
- mindspore/profiler/profiler_action_controller.py +65 -77
- mindspore/profiler/profiler_interface.py +2 -2
- mindspore/profiler/schedule.py +10 -4
- mindspore/rewrite/common/config.py +1 -0
- mindspore/rewrite/common/namer.py +1 -0
- mindspore/rewrite/common/namespace.py +1 -0
- mindspore/rewrite/node/node.py +31 -11
- mindspore/rewrite/parsers/assign_parser.py +1 -1
- mindspore/rewrite/symbol_tree/symbol_tree.py +2 -2
- mindspore/run_check/_check_version.py +7 -10
- mindspore/runtime/__init__.py +8 -6
- mindspore/runtime/event.py +10 -4
- mindspore/runtime/executor.py +87 -45
- mindspore/runtime/memory.py +22 -30
- mindspore/runtime/thread_bind_core.py +299 -165
- mindspore/safeguard/rewrite_obfuscation.py +12 -13
- mindspore/swresample-4.dll +0 -0
- mindspore/swscale-6.dll +0 -0
- mindspore/tbbmalloc.dll +0 -0
- mindspore/tinyxml2.dll +0 -0
- mindspore/train/_utils.py +9 -5
- mindspore/train/amp.py +43 -23
- mindspore/train/callback/__init__.py +5 -5
- mindspore/train/callback/_callback.py +2 -1
- mindspore/train/callback/_checkpoint.py +4 -14
- mindspore/train/callback/_flops_collector.py +11 -7
- mindspore/train/callback/_landscape.py +0 -1
- mindspore/train/callback/_train_fault_tolerance.py +72 -18
- mindspore/train/data_sink.py +15 -6
- mindspore/train/dataset_helper.py +14 -5
- mindspore/train/model.py +49 -47
- mindspore/train/serialization.py +168 -126
- mindspore/train/summary/summary_record.py +13 -2
- mindspore/train/train_thor/model_thor.py +2 -2
- mindspore/turbojpeg.dll +0 -0
- mindspore/utils/__init__.py +3 -2
- mindspore/utils/dryrun.py +0 -6
- mindspore/utils/runtime_execution_order_check.py +162 -78
- mindspore/utils/sdc_detect.py +68 -0
- mindspore/utils/utils.py +14 -17
- mindspore/vcmeta.dll +0 -0
- mindspore/vcruntime140.dll +0 -0
- mindspore/vcruntime140_1.dll +0 -0
- mindspore/version.py +1 -1
- {mindspore-2.6.0.dist-info → mindspore-2.7.0.dist-info}/METADATA +5 -4
- {mindspore-2.6.0.dist-info → mindspore-2.7.0.dist-info}/RECORD +400 -439
- mindspore/_deprecated/jit.py +0 -198
- mindspore/_extends/remote/kernel_build_server_ascend.py +0 -75
- mindspore/communication/_hccl_management.py +0 -297
- mindspore/experimental/es/embedding_service.py +0 -891
- mindspore/experimental/es/embedding_service_layer.py +0 -581
- mindspore/profiler/common/validator/__init__.py +0 -14
- mindspore/profiler/common/validator/validate_path.py +0 -84
- mindspore/profiler/parser/__init__.py +0 -14
- mindspore/profiler/parser/aicpu_data_parser.py +0 -272
- mindspore/profiler/parser/ascend_analysis/__init__.py +0 -14
- mindspore/profiler/parser/ascend_analysis/constant.py +0 -71
- mindspore/profiler/parser/ascend_analysis/file_manager.py +0 -180
- mindspore/profiler/parser/ascend_analysis/function_event.py +0 -185
- mindspore/profiler/parser/ascend_analysis/fwk_cann_parser.py +0 -136
- mindspore/profiler/parser/ascend_analysis/fwk_file_parser.py +0 -131
- mindspore/profiler/parser/ascend_analysis/msprof_timeline_parser.py +0 -104
- mindspore/profiler/parser/ascend_analysis/path_manager.py +0 -313
- mindspore/profiler/parser/ascend_analysis/profiler_info_parser.py +0 -123
- mindspore/profiler/parser/ascend_analysis/tlv_decoder.py +0 -86
- mindspore/profiler/parser/ascend_analysis/trace_event_manager.py +0 -75
- mindspore/profiler/parser/ascend_cluster_generator.py +0 -116
- mindspore/profiler/parser/ascend_communicate_generator.py +0 -314
- mindspore/profiler/parser/ascend_flops_generator.py +0 -116
- mindspore/profiler/parser/ascend_fpbp_generator.py +0 -82
- mindspore/profiler/parser/ascend_hccl_generator.py +0 -271
- mindspore/profiler/parser/ascend_integrate_generator.py +0 -42
- mindspore/profiler/parser/ascend_memory_generator.py +0 -185
- mindspore/profiler/parser/ascend_msprof_exporter.py +0 -282
- mindspore/profiler/parser/ascend_msprof_generator.py +0 -187
- mindspore/profiler/parser/ascend_op_generator.py +0 -334
- mindspore/profiler/parser/ascend_steptrace_generator.py +0 -94
- mindspore/profiler/parser/ascend_timeline_generator.py +0 -545
- mindspore/profiler/parser/base_timeline_generator.py +0 -483
- mindspore/profiler/parser/container.py +0 -229
- mindspore/profiler/parser/cpu_gpu_timeline_generator.py +0 -697
- mindspore/profiler/parser/flops_parser.py +0 -531
- mindspore/profiler/parser/framework_enum.py +0 -111
- mindspore/profiler/parser/framework_parser.py +0 -464
- mindspore/profiler/parser/framework_struct.py +0 -61
- mindspore/profiler/parser/gpu_analysis/__init__.py +0 -14
- mindspore/profiler/parser/gpu_analysis/function_event.py +0 -44
- mindspore/profiler/parser/gpu_analysis/fwk_file_parser.py +0 -89
- mindspore/profiler/parser/gpu_analysis/profiler_info_parser.py +0 -72
- mindspore/profiler/parser/hccl_parser.py +0 -573
- mindspore/profiler/parser/hwts_log_parser.py +0 -122
- mindspore/profiler/parser/integrator.py +0 -526
- mindspore/profiler/parser/memory_usage_parser.py +0 -277
- mindspore/profiler/parser/minddata_analyzer.py +0 -800
- mindspore/profiler/parser/minddata_parser.py +0 -186
- mindspore/profiler/parser/minddata_pipeline_parser.py +0 -299
- mindspore/profiler/parser/op_intermediate_parser.py +0 -149
- mindspore/profiler/parser/optime_parser.py +0 -250
- mindspore/profiler/parser/profiler_info.py +0 -213
- mindspore/profiler/parser/step_trace_parser.py +0 -666
- mindspore/utils/hooks.py +0 -81
- /mindspore/common/{_auto_dynamic.py → dynamic_shape/_auto_dynamic.py} +0 -0
- {mindspore-2.6.0.dist-info → mindspore-2.7.0.dist-info}/WHEEL +0 -0
- {mindspore-2.6.0.dist-info → mindspore-2.7.0.dist-info}/entry_points.txt +0 -0
- {mindspore-2.6.0.dist-info → mindspore-2.7.0.dist-info}/top_level.txt +0 -0
mindspore/_deprecated/jit.py
DELETED
@@ -1,198 +0,0 @@
-# Copyright 2024 Huawei Technologies Co., Ltd
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============================================================================
-"""Store the deprecated api temporarily."""
-from __future__ import absolute_import
-
-import os
-import types
-import time
-from functools import wraps
-from mindspore import log as logger
-from mindspore.common.tensor import Tensor as PythonTensor
-from mindspore.common.api import _get_jit_hash, _process_dyn_args, _handle_func_args, _JitExecutor
-from mindspore.parallel._utils import _is_pynative_parallel
-from mindspore._c_expression.amp import get_curr_amp_strategy
-from mindspore.common._pijit_context import PIJitCaptureContext
-
-
-_PYNATIVE_PARALLEL_FUNC_NAME = "after_shard"
-
-
-def jit(fn=None, mode="PSJit", input_signature=None, hash_args=None, jit_config=None, compile_once=False):
-    """
-    Create a callable MindSpore graph from a Python function.
-
-    This allows the MindSpore runtime to apply optimizations based on graph.
-
-    Note:
-        - If `input_signature` is specified, each input of `fn` must be a Tensor. And the input arguments for `fn`
-          will not accept `**kwargs`.
-        - It is not supported to run a function with decoration @jit(mode="PIJit")
-          in static graph mode, in which case the decoration @jit(mode="PIJit") is considered invalid.
-        - Calls to functions with decorated @jit(mode="PIJit") inside functions
-          decorated with @jit(mode="PIJit") are not supported,
-          and the decoration @jit(mode="PIJit") is considered invalid.
-
-    Args:
-        fn (Function): The Python function that will be run as a graph. Default: ``None`` .
-        mode (str): The type of jit used, the value of mode should be ``PIJit`` or ``PSJit``. Default: ``PSJit`` .
-
-            - PSJit:
-              Parse python ast to build graph.
-            - PIJit:
-              Parse python bytecode to build graph at runtime.
-
-        input_signature (Union[Tuple, List, Dict, Tensor]): The Tensor which describes the input arguments. The
-            shape and dtype of the Tensor will be supplied to this function. If `input_signature` is specified, the
-            input parameters of `fn` cannot accept `**kwargs`, and the shape and dtype of actual inputs should keep the
-            same as `input_signature`. Otherwise, TypeError will be raised. There are two mode for `input_signature`:
-
-            - Full mode: Arguments is a Tuple, List or a Tensor, and they will be used as all compile inputs
-              for graph-compiling.
-            - Incremental mode: Argument is a Dict, and they will set to some of the graph inputs, which will be
-              substituted into the input at the corresponding position for graph-compiling.
-
-            Default: ``None`` .
-
-        hash_args (Union[Object, List or Tuple of Objects]): The local free variables used inside `fn`,
-            like functions or objects of class defined outside `fn`. Calling `fn` again with change of `hash_args`
-            will trigger recompilation. Default: ``None`` .
-        jit_config (JitConfig): Jit config for compile. Default: ``None`` .
-        compile_once(bool): ``True``: The function would be compiled once when it was created many times.
-            But it may be wrong if the free variables were changed. ``False`` : It would be recompiled when
-            it was created again.
-            Default: ``False`` .
-
-    Returns:
-        Function, if `fn` is not None, returns a callable function that will execute the compiled function; If `fn` is
-        None, returns a decorator and when this decorator invokes with a single `fn` argument, the callable function is
-        equal to the case when `fn` is not None.
-
-    Supported Platforms:
-        ``Ascend`` ``GPU`` ``CPU``
-
-    Examples:
-        >>> import numpy as np
-        >>> from mindspore import Tensor
-        >>> from mindspore import ops
-        >>> import mindspore._deprecated.jit as jit
-        ...
-        >>> x = Tensor(np.ones([1, 1, 3, 3]).astype(np.float32))
-        >>> y = Tensor(np.ones([1, 1, 3, 3]).astype(np.float32))
-        ...
-        >>> # create a callable MindSpore graph by calling decorator @jit
-        >>> def tensor_add(x, y):
-        ...     z = x + y
-        ...     return z
-        ...
-        >>> tensor_add_graph = jit(fn=tensor_add)
-        >>> out = tensor_add_graph(x, y)
-        ...
-        >>> # create a callable MindSpore graph through decorator @jit
-        >>> @jit
-        ... def tensor_add_with_dec(x, y):
-        ...     z = x + y
-        ...     return z
-        ...
-        >>> out = tensor_add_with_dec(x, y)
-        ...
-        >>> # create a callable MindSpore graph through decorator @jit with input_signature parameter
-        >>> @jit(input_signature=(Tensor(np.ones([1, 1, 3, 3]).astype(np.float32)),
-        ...                       Tensor(np.ones([1, 1, 3, 3]).astype(np.float32))))
-        ... def tensor_add_with_sig(x, y):
-        ...     z = x + y
-        ...     return z
-        ...
-        >>> out = tensor_add_with_sig(x, y)
-        ...
-        >>> @jit(input_signature={"y": Tensor(np.ones([1, 1, 3, 3]).astype(np.float32))})
-        ... def tensor_add_with_sig_1(x, y):
-        ...     z = x + y
-        ...     return z
-        ...
-        >>> out1 = tensor_add_with_sig_1(x, y)
-        ...
-        ... # Set hash_args as fn, otherwise cache of compiled closure_fn will not be reused.
-        ... # While fn differs during calling again, recompilation will be triggered.
-        >>> def func(x):
-        ...     return ops.exp(x)
-        ...
-        >>> def closure_fn(x, fn):
-        ...     @jit(hash_args=fn)
-        ...     def inner_fn(a):
-        ...         return fn(a)
-        ...     return inner_fn(x)
-        ...
-        >>> inputs = Tensor(np.ones([10, 10, 10]).astype(np.float32))
-        >>> for i in range(10):
-        ...     closure_fn(inputs, func)
-        ...
-        ... # Set compile_once = True, otherwise the train_step will be compiled again.
-        >>> def train(x):
-        ...     @jit(compile_once = True)
-        ...     def train_step(x):
-        ...         return ops.exp(x)
-        ...     for i in range(10):
-        ...         train_step(x)
-        ...
-        >>> inputs = Tensor(np.ones([10, 10, 10]).astype(np.float32))
-        >>> for i in range(10):
-        ...     train(inputs)
-    """
-
-    def wrap_mindspore(func):
-        if not isinstance(compile_once, bool):
-            logger.warning(f"The parameter `compile_once` of jit should be a bool, "
-                           f"but got {type(compile_once)}.")
-        if hash_args:
-            hash_obj = _get_jit_hash(hash_args)
-        elif compile_once:
-            hash_obj = 0
-        else:
-            hash_obj = int(time.time() * 1e9)
-
-        dyn_args = _process_dyn_args(func, input_signature)
-
-        @wraps(func)
-        def staging_specialize(*args, **kwargs):
-            if os.getenv("MS_JIT") == '0':
-                return func(*args, **kwargs)
-
-            args, kwargs = _handle_func_args(func, *args, **kwargs)
-
-            process_obj = None
-            if args and not isinstance(args[0], PythonTensor) and hasattr(args[0], func.__name__):
-                process_obj = args[0]
-            # only the function or cell instance wrapped by shard will fall into this branch
-            if _is_pynative_parallel() and func.__name__ == _PYNATIVE_PARALLEL_FUNC_NAME:
-                process_obj = hash_args
-            # Handle auto mixed precision strategy.
-            if not hasattr(func, "amp_strategy"):
-                if isinstance(func, types.MethodType):
-                    setattr(func.__func__, "amp_strategy", get_curr_amp_strategy())
-                else:
-                    setattr(func, "amp_strategy", get_curr_amp_strategy())
-            out = _JitExecutor(func, hash_obj, dyn_args, process_obj, jit_config)(*args, **kwargs)
-            return out
-
-        return staging_specialize
-
-    wrap_func = wrap_mindspore
-    if mode == "PIJit":
-        wrap_func = PIJitCaptureContext(jit_config, input_signature)
-
-    if fn is not None:
-        return wrap_func(fn)
-    return wrap_func
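For readers tracking the removal: the deprecated entry point above overlaps with the public mindspore.jit API, which remains available in 2.7.0. Below is a minimal sketch of the equivalent basic usage, assuming only the stable @jit decorator form (version-specific keyword arguments are deliberately omitted):

import numpy as np
from mindspore import Tensor, jit

# Minimal sketch: the public @jit decorator compiles a Python function
# into a MindSpore graph, covering the basic use case of the deleted
# mindspore._deprecated.jit shown above.
@jit
def tensor_add(x, y):
    return x + y

x = Tensor(np.ones([1, 1, 3, 3]).astype(np.float32))
y = Tensor(np.ones([1, 1, 3, 3]).astype(np.float32))
out = tensor_add(x, y)
print(out.shape)  # (1, 1, 3, 3)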
mindspore/_extends/remote/kernel_build_server_ascend.py
DELETED
@@ -1,75 +0,0 @@
-# Copyright 2020-2021 Huawei Technologies Co., Ltd
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============================================================================
-"""kernel build server for ascend"""
-import sys
-import warnings
-import json
-
-from mindspore._extends.parallel_compile.tbe_compiler.tbe_job_manager import TbeJobManager
-from mindspore._extends.remote.kernel_build_server import Messager, get_logger, AkgBuilder
-
-
-class AscendMessager(Messager):
-    """
-    Ascend Messager
-    It works as a server, communicating with c++ client.
-    """
-
-    def __init__(self, fdin, fdout):
-        super().__init__(fdin, fdout)
-        get_logger().info("[TRACE] Ascend Messager init...")
-        self.tbe_builder = TbeJobManager()
-        self.akg_builder = AkgBuilder("ASCEND")
-
-    def handle(self):
-        """
-        Communicate with remote client.
-        Reference protocol between them at PR#3821 and PR#3935
-        """
-        arg = self.get_message()
-        if arg.startswith('AKG'):
-            self.akg_builder.handle(self, arg)
-        else:
-            job_json = dict()
-            try:
-                job_json = json.loads(arg)
-            except json.decoder.JSONDecodeError:
-                get_logger().error("[TRACE] Request is not a json message: {}".format(arg))
-                self.send_ack(False)
-                self.exit()
-            finally:
-                pass
-
-            if "job_type" in job_json:
-                res = self.tbe_builder.job_handler(arg)
-                self.send_res(res)
-            else:
-                get_logger().error("[TRACE] Request is not a TBE Job message: {}".format(arg))
-                self.send_ack(False)
-                self.exit()
-
-    def exit(self):
-        self.tbe_builder.reset()
-        get_logger().info("[TRACE] Ascend Messager Exit...")
-        exit()
-
-
-if __name__ == '__main__':
-    warnings.simplefilter("ignore")
-    if len(sys.argv) != 3:
-        raise Exception('Incorrect argv: {}'.format(sys.argv))
-    get_logger().debug(f"[TRACE] argv: {str(sys.argv)}")
-    messager = AscendMessager(int(sys.argv[1]), int(sys.argv[2]))
-    messager.run()
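The deleted handle() method implements a simple two-way dispatch: messages prefixed with 'AKG' go to the AKG builder, while everything else must parse as JSON carrying a "job_type" key to be treated as a TBE job. A standalone sketch of that routing rule follows (route_message is a hypothetical name, for illustration only):

import json

def route_message(arg: str) -> str:
    # Hypothetical restatement of the dispatch rule in the deleted
    # AscendMessager.handle(): 'AKG'-prefixed text is routed to the AKG
    # builder, valid JSON with a "job_type" key is a TBE job, and
    # anything else is rejected with a negative ack.
    if arg.startswith('AKG'):
        return 'akg'
    try:
        job_json = json.loads(arg)
    except json.JSONDecodeError:
        return 'reject: not json'
    return 'tbe' if 'job_type' in job_json else 'reject: not a TBE job'

assert route_message('AKG start') == 'akg'
assert route_message('{"job_type": "Compile"}') == 'tbe'
assert route_message('not json') == 'reject: not json'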
mindspore/communication/_hccl_management.py
DELETED
@@ -1,297 +0,0 @@
-# Copyright 2020 Huawei Technologies Co., Ltd
-
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============================================================================
-"""HCCL management API"""
-from __future__ import absolute_import
-from __future__ import division
-
-import ctypes
-import os
-
-from mindspore import context
-from mindspore._c_expression import get_hccl_rank_id, get_hccl_rank_size
-
-MAX_GROUP_NAME_LEN = 127
-MAX_RANK_NUM = 4096
-HCCL_LIB = 'libhccl_plugin.so'
-HCCL_LIB_CTYPES = ""
-
-
-def check_group(group):
-    """
-    A function that check if a collection communication group is legal.
-
-    Returns:
-        None
-    """
-    if isinstance(group, (str)):
-        group_len = len(group)
-        if group_len > MAX_GROUP_NAME_LEN or group_len == 0:
-            raise ValueError("The length of communication group name must be in range [1, 127), "
-                             "but got the value : {} ".format(group_len))
-    else:
-        raise TypeError("The type of communication group name must be type of string, "
-                        "but got 'group' type : {}.".format(type(group)))
-
-
-def check_rank_num(rank_num):
-    """
-    A function that check if a collection communication rank number is legal.If not raise error.
-
-    Returns:
-        None
-    """
-    if isinstance(rank_num, (int)):
-        if rank_num > MAX_RANK_NUM or rank_num <= 0:
-            raise ValueError("For 'create_group', the size of argument 'rand_ids' should be greater than 0 and"
-                             "less than {}, but got the size of 'rank_ids' : {}.".format(MAX_RANK_NUM, rank_num))
-    else:
-        raise TypeError("The argument 'rank_num' must be type of int, "
-                        "but got 'rank_num' type : {}.".format(type(rank_num)))
-
-
-def check_rank_id(rank_id):
-    """
-    A function that check if a collection communication rank id is legal.If not raise error.
-
-    Returns:
-        None
-    """
-    if isinstance(rank_id, (int)):
-        if rank_id >= MAX_RANK_NUM or rank_id < 0:
-            raise ValueError("The rand id in the communication group must be greater or equal 0 and "
-                             "less than {}, but got type value : {}.".format(MAX_RANK_NUM, rank_id))
-    else:
-        raise TypeError("The rand id in the communication group must be must be type of int, "
-                        "but got type value : {}.".format(type(rank_id)))
-
-
-def load_lib():
-    """load hccl lib"""
-    try:
-        base_dir = os.path.dirname(os.path.realpath(__file__))
-        lib_path = os.path.join(base_dir, "../lib/plugin/ascend", HCCL_LIB)
-        hccl_lib = ctypes.CDLL(lib_path)
-    except Exception:
-        raise RuntimeError('Get hccl lib error.')
-
-    global HCCL_LIB_CTYPES
-    HCCL_LIB_CTYPES = hccl_lib
-
-
-def c_str(string):
-    """Convert a python string to C string."""
-    if not isinstance(string, str):
-        string = string.decode('ascii')
-    return ctypes.c_char_p(string.encode('utf-8'))
-
-
-def c_array(ctype, values):
-    """Create ctypes array from a python array."""
-    return (ctype * len(values))(*values)
-
-
-def create_group(group, rank_num, rank_ids):
-    """
-    Create group.
-
-    A function that creates a collection communication group which includes 'rank_num'
-    device and 'rank_ids' is the list of these ranks of devices.
-
-    Note:
-        The world group can not be created.
-
-    Returns:
-        None
-    """
-    check_group(group)
-    check_rank_num(rank_num)
-    if isinstance(rank_ids, (list)):
-        if rank_num != len(rank_ids):
-            raise ValueError("The argument 'rank_num' number should be equal to the length "
-                             "of rank_ids, but got 'rank_num' value : {} and 'rank_ids' value : {}."
-                             .format(rank_num, rank_ids))
-        for rank_id in rank_ids:
-            if not isinstance(rank_id, (int)) or rank_id < 0:
-                raise ValueError("The elements of argument 'rank_ids' must be "
-                                 "unsigned integer, but got the type : {}".format(type(rank_id)))
-        c_array_rank_ids = c_array(ctypes.c_uint, rank_ids)
-        c_rank_num = ctypes.c_uint(rank_num)
-        c_group = c_str(group)
-        ret = HCCL_LIB_CTYPES.HcomCreateGroup(c_group, c_rank_num, c_array_rank_ids)
-        if ret != 0:
-            raise RuntimeError('Create group error, the error code is {}.'.format(ret))
-    else:
-        raise TypeError("For 'create_group', the argument 'rank_ids' must be type of list, "
-                        "but got 'rank_ids' type : {}.".format(type(rank_ids)))
-
-
-def destroy_group(group):
-    """
-    A function that destroy the group which created by user.
-
-    Note:
-        The world group can not be destroy.
-
-    Returns:
-        None
-    """
-    check_group(group)
-    c_group = c_str(group)
-    ret = HCCL_LIB_CTYPES.HcomDestroyGroup(c_group)
-    if ret != 0:
-        raise RuntimeError('Destroy group error.')
-
-
-def get_rank_size(group="hccl_world_group"):
-    """
-    A function that returns the number of ranks within the given collection communication group.
-
-    Note:
-        The default group is hccl_world_group.
-
-    Returns:
-        An integer scalar with the num of ranks.
-    """
-
-    if context.get_context("mode") == context.PYNATIVE_MODE:
-        return get_hccl_rank_size()
-
-    check_group(group)
-    c_group = c_str(group)
-    c_rank_size = ctypes.c_uint()
-    ret = HCCL_LIB_CTYPES.HcomGetRankSize(c_group, ctypes.byref(c_rank_size))
-    if ret != 0:
-        raise RuntimeError('Get rank size error.')
-
-    return c_rank_size.value
-
-
-def get_rank_id(group="hccl_world_group"):
-    """
-    A function that returns the rank id of the calling process, within the given collection communication group.
-
-    Returns:
-        An integer scalar with the rank id of the calling process.
-    """
-
-    if context.get_context("mode") == context.PYNATIVE_MODE:
-        return get_hccl_rank_id()
-
-    check_group(group)
-    c_group = c_str(group)
-    c_rank_id = ctypes.c_uint()
-    ret = HCCL_LIB_CTYPES.HcomGetRankId(c_group, ctypes.byref(c_rank_id))
-    if ret != 0:
-        raise RuntimeError('Get rank id error.')
-
-    return c_rank_id.value
-
-
-
-def get_local_rank_size(group="hccl_world_group"):
-    """
-    A function that returns the number of local ranks within the given collection communication group.
-
-    Note:
-        The default group is hccl_world_group.
-
-    Returns:
-        An integer scalar with the num of local ranks.
-    """
-    if context.get_context("mode") is context.PYNATIVE_MODE:
-        raise RuntimeError("The function 'get_local_rank_size' is not supported in PYNATIVE_MODE, "
-                           "'get_local_rank_size' only support GRAPH_MODE")
-    check_group(group)
-    c_group = c_str(group)
-    c_local_rank_size = ctypes.c_uint()
-    ret = HCCL_LIB_CTYPES.HcomGetLocalRankSize(c_group, ctypes.byref(c_local_rank_size))
-    if ret != 0:
-        raise RuntimeError('Get local rank size error.')
-
-    return c_local_rank_size.value
-
-
-def get_local_rank_id(group="hccl_world_group"):
-    """
-    Get local rank id.
-
-    A function that returns the local rank id of the calling process, within the given collection communication group.
-
-    Returns:
-        An integer scalar with the local rank id of the calling process.
-    """
-
-    if context.get_context("mode") is context.PYNATIVE_MODE:
-        raise RuntimeError("The function 'get_local_rank_id' is not supported in PYNATIVE_MODE, "
-                           "'get_local_rank_id' only support GRAPH_MODE")
-    check_group(group)
-    c_group = c_str(group)
-    c_local_rank_id = ctypes.c_uint()
-    ret = HCCL_LIB_CTYPES.HcomGetLocalRankId(c_group, ctypes.byref(c_local_rank_id))
-    if ret != 0:
-        raise RuntimeError('Get local rank id error.')
-
-    return c_local_rank_id.value
-
-
-def get_world_rank_from_group_rank(group, group_rank_id):
-    """
-    Get world rank from group rank.
-
-    A function that returns the rank id in the world group corresponding to the
-    rank which id is 'group_rank_id' in the user group.
-
-    Returns:
-        An integer scalar with the rank id in the world group.
-    """
-    if context.get_context("mode") is context.PYNATIVE_MODE:
-        raise RuntimeError("The function 'get_world_rank_from_group_rank' is not supported in PYNATIVE_MODE, "
-                           "'get_world_rank_from_group_rank' only support GRAPH_MODE")
-    check_group(group)
-    check_rank_id(group_rank_id)
-    c_group = c_str(group)
-    c_group_rank_id = ctypes.c_uint(group_rank_id)
-    c_world_rank_id = ctypes.c_uint()
-    ret = HCCL_LIB_CTYPES.HcomGetWorldRankFromGroupRank(c_group, c_group_rank_id, ctypes.byref(c_world_rank_id))
-    if ret != 0:
-        raise RuntimeError('Get world rank from group rank error.')
-
-    return c_world_rank_id.value
-
-
-def get_group_rank_from_world_rank(world_rank_id, group):
-    """
-    Get group rank from world rank.
-
-    A function that returns the rank id in the user group corresponding to the
-    rank which id is 'world_rank_id' in the world group.
-
-    Returns:
-        An integer scalar with the rank id in the user group.
-    """
-    if context.get_context("mode") is context.PYNATIVE_MODE:
-        raise RuntimeError("The function 'get_group_rank_from_world_rank' is not supported in PYNATIVE_MODE, "
-                           "'get_group_rank_from_world_rank' only support GRAPH_MODE")
-    check_group(group)
-    check_rank_id(world_rank_id)
-    c_group = c_str(group)
-    c_world_rank_id = ctypes.c_uint(world_rank_id)
-    c_group_rank_id = ctypes.c_uint()
-    ret = HCCL_LIB_CTYPES.HcomGetGroupRankFromWorldRank(c_world_rank_id, c_group, ctypes.byref(c_group_rank_id))
-    if ret != 0:
-        raise RuntimeError('Get group rank from world rank error.')
-
-    return c_group_rank_id.value
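This internal ctypes wrapper was superseded by the public mindspore.communication package, which the file list above shows being extended in this release (management.py +79 -7). Below is a minimal sketch of the equivalent group operations through the public API, assuming a distributed job already launched (for example with msrun) on a backend that supports HCCL:

from mindspore.communication import (init, create_group, get_rank, get_group_size,
                                     get_world_rank_from_group_rank)

# Minimal sketch: the public API covers the operations the deleted
# wrapper exposed (group creation, rank queries, group<->world rank
# translation) without touching libhccl_plugin.so directly.
init()                         # replaces load_lib() plus the Hcom* setup
world_rank = get_rank()        # was get_rank_id()
world_size = get_group_size()  # was get_rank_size()

# Build a sub-group from the first two world ranks, then map the
# group-local rank 0 back to its world rank.
if world_size >= 2:
    create_group("sub_group_01", [0, 1])
    if world_rank in (0, 1):
        print(get_world_rank_from_group_rank("sub_group_01", 0))  # -> 0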