mindstudio-probe 1.0.1__py3-none-any.whl → 1.0.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {mindstudio_probe-1.0.1.dist-info → mindstudio_probe-1.0.4.dist-info}/LICENSE +201 -201
- {mindstudio_probe-1.0.1.dist-info → mindstudio_probe-1.0.4.dist-info}/METADATA +36 -30
- mindstudio_probe-1.0.4.dist-info/RECORD +276 -0
- {mindstudio_probe-1.0.1.dist-info → mindstudio_probe-1.0.4.dist-info}/WHEEL +1 -1
- {mindstudio_probe-1.0.1.dist-info → mindstudio_probe-1.0.4.dist-info}/entry_points.txt +1 -0
- msprobe/README.md +101 -182
- msprobe/__init__.py +1 -0
- msprobe/{config/config.json → config.json} +49 -27
- msprobe/core/__init__.py +0 -0
- msprobe/{pytorch → core}/advisor/advisor.py +124 -124
- msprobe/{pytorch → core}/advisor/advisor_const.py +59 -59
- msprobe/{pytorch → core}/advisor/advisor_result.py +58 -58
- msprobe/core/common/const.py +341 -241
- msprobe/core/common/exceptions.py +100 -88
- msprobe/core/common/{file_check.py → file_utils.py} +478 -265
- msprobe/core/common/log.py +76 -55
- msprobe/core/common/utils.py +385 -516
- msprobe/core/common_config.py +85 -58
- msprobe/core/compare/acc_compare.py +300 -0
- msprobe/core/compare/check.py +95 -0
- msprobe/core/compare/compare_cli.py +49 -0
- msprobe/core/compare/highlight.py +223 -0
- msprobe/core/compare/multiprocessing_compute.py +149 -0
- msprobe/{pytorch → core}/compare/npy_compare.py +295 -244
- msprobe/core/compare/utils.py +430 -0
- msprobe/core/data_dump/data_collector.py +154 -140
- msprobe/core/data_dump/data_processor/base.py +314 -245
- msprobe/core/data_dump/data_processor/factory.py +59 -61
- msprobe/core/data_dump/data_processor/mindspore_processor.py +186 -0
- msprobe/core/data_dump/data_processor/pytorch_processor.py +366 -346
- msprobe/core/data_dump/json_writer.py +96 -116
- msprobe/core/data_dump/scope.py +178 -178
- msprobe/core/grad_probe/__init__.py +0 -0
- msprobe/core/grad_probe/constant.py +71 -0
- msprobe/core/grad_probe/grad_compare.py +171 -0
- msprobe/core/grad_probe/utils.py +64 -0
- msprobe/docs/01.installation.md +89 -0
- msprobe/docs/02.config_introduction.md +165 -0
- msprobe/docs/03.config_examples.md +247 -0
- msprobe/docs/04.acl_config_examples.md +76 -0
- msprobe/docs/05.data_dump_PyTorch.md +198 -0
- msprobe/docs/06.data_dump_MindSpore.md +243 -0
- msprobe/docs/07.accuracy_checker_PyTorch.md +274 -0
- msprobe/docs/08.accuracy_checker_online_PyTorch.md +198 -0
- msprobe/docs/09.accuracy_checker_MindSpore.md +68 -0
- msprobe/docs/10.accuracy_compare_PyTorch.md +245 -0
- msprobe/docs/11.accuracy_compare_MindSpore.md +202 -0
- msprobe/docs/12.overflow_check_PyTorch.md +79 -0
- msprobe/docs/13.overflow_check_MindSpore.md +31 -0
- msprobe/{pytorch/doc/parse_tool.md → docs/14.data_parse_PyTorch.md} +283 -286
- msprobe/docs/15.free_benchmarking_PyTorch.md +164 -0
- msprobe/docs/17.grad_probe.md +207 -0
- msprobe/docs/FAQ_PyTorch.md +177 -0
- msprobe/docs/S02.report_free_benchmarking_validation_performance_baseline.md +146 -0
- msprobe/docs/img/free_benchmark_framework.png +0 -0
- msprobe/docs/img/grad_probe_image-1.png +0 -0
- msprobe/docs/img/grad_probe_image-2.png +0 -0
- msprobe/docs/img/grad_probe_image-3.png +0 -0
- msprobe/docs/img/grad_probe_image-4.png +0 -0
- msprobe/docs/img/grad_probe_image.png +0 -0
- msprobe/mindspore/__init__.py +1 -1
- msprobe/mindspore/api_accuracy_checker/__init__.py +0 -0
- msprobe/mindspore/api_accuracy_checker/api_accuracy_checker.py +255 -0
- msprobe/mindspore/api_accuracy_checker/api_info.py +69 -0
- msprobe/mindspore/api_accuracy_checker/api_runner.py +156 -0
- msprobe/mindspore/api_accuracy_checker/base_compare_algorithm.py +197 -0
- msprobe/mindspore/api_accuracy_checker/cmd_parser.py +6 -0
- msprobe/mindspore/api_accuracy_checker/compute_element.py +239 -0
- msprobe/mindspore/api_accuracy_checker/main.py +9 -0
- msprobe/mindspore/api_accuracy_checker/type_mapping.py +114 -0
- msprobe/mindspore/api_accuracy_checker/utils.py +80 -0
- msprobe/mindspore/cell_processor.py +34 -0
- msprobe/mindspore/common/const.py +106 -0
- msprobe/mindspore/common/log.py +38 -0
- msprobe/mindspore/common/utils.py +81 -0
- msprobe/mindspore/compare/distributed_compare.py +75 -0
- msprobe/mindspore/compare/ms_compare.py +219 -0
- msprobe/mindspore/compare/ms_graph_compare.py +348 -0
- msprobe/mindspore/compare/ms_to_pt_api.yaml +399 -0
- msprobe/mindspore/debugger/debugger_config.py +66 -51
- msprobe/mindspore/debugger/precision_debugger.py +126 -32
- msprobe/mindspore/dump/dump_tool_factory.py +35 -38
- msprobe/mindspore/dump/hook_cell/api_registry.py +118 -0
- msprobe/mindspore/dump/hook_cell/hook_cell.py +55 -0
- msprobe/mindspore/dump/hook_cell/support_wrap_ops.yaml +922 -0
- msprobe/mindspore/dump/hook_cell/wrap_api.py +113 -0
- msprobe/mindspore/dump/jit_dump.py +72 -0
- msprobe/mindspore/dump/kernel_graph_dump.py +59 -60
- msprobe/mindspore/dump/kernel_kbyk_dump.py +64 -0
- msprobe/mindspore/free_benchmark/__init__.py +0 -0
- msprobe/mindspore/free_benchmark/api_pynative_self_check.py +116 -0
- msprobe/mindspore/free_benchmark/common/__init__.py +0 -0
- msprobe/mindspore/free_benchmark/common/config.py +12 -0
- msprobe/mindspore/free_benchmark/common/handler_params.py +17 -0
- msprobe/mindspore/free_benchmark/common/utils.py +71 -0
- msprobe/mindspore/free_benchmark/data/support_wrap_ops.yaml +842 -0
- msprobe/mindspore/free_benchmark/decorator/__init__.py +0 -0
- msprobe/mindspore/free_benchmark/decorator/dec_forward.py +43 -0
- msprobe/mindspore/free_benchmark/decorator/decorator_factory.py +107 -0
- msprobe/mindspore/free_benchmark/handler/__init__.py +0 -0
- msprobe/mindspore/free_benchmark/handler/base_handler.py +90 -0
- msprobe/mindspore/free_benchmark/handler/check_handler.py +41 -0
- msprobe/mindspore/free_benchmark/handler/fix_handler.py +36 -0
- msprobe/mindspore/free_benchmark/handler/handler_factory.py +21 -0
- msprobe/mindspore/free_benchmark/perturbation/add_noise.py +67 -0
- msprobe/mindspore/free_benchmark/perturbation/base_perturbation.py +21 -0
- msprobe/mindspore/free_benchmark/perturbation/bit_noise.py +63 -0
- msprobe/mindspore/free_benchmark/perturbation/exchange_value.py +51 -0
- msprobe/mindspore/free_benchmark/perturbation/improve_precision.py +35 -0
- msprobe/mindspore/free_benchmark/perturbation/no_change.py +12 -0
- msprobe/mindspore/free_benchmark/perturbation/perturbation_factory.py +29 -0
- msprobe/mindspore/free_benchmark/self_check_tool_factory.py +33 -0
- msprobe/mindspore/grad_probe/__init__.py +0 -0
- msprobe/mindspore/grad_probe/global_context.py +90 -0
- msprobe/mindspore/grad_probe/grad_analyzer.py +231 -0
- msprobe/mindspore/grad_probe/grad_monitor.py +27 -0
- msprobe/mindspore/grad_probe/grad_stat_csv.py +132 -0
- msprobe/mindspore/grad_probe/hook.py +94 -0
- msprobe/mindspore/grad_probe/utils.py +30 -0
- msprobe/mindspore/ms_config.py +128 -78
- msprobe/mindspore/overflow_check/kernel_graph_overflow_check.py +44 -45
- msprobe/mindspore/overflow_check/overflow_check_tool_factory.py +34 -32
- msprobe/mindspore/runtime.py +4 -0
- msprobe/mindspore/service.py +378 -0
- msprobe/mindspore/task_handler_factory.py +24 -21
- msprobe/msprobe.py +105 -67
- msprobe/pytorch/__init__.py +4 -4
- msprobe/pytorch/api_accuracy_checker/common/config.py +53 -50
- msprobe/pytorch/api_accuracy_checker/common/utils.py +214 -224
- msprobe/pytorch/api_accuracy_checker/compare/algorithm.py +213 -216
- msprobe/pytorch/api_accuracy_checker/compare/api_precision_compare.py +606 -545
- msprobe/pytorch/api_accuracy_checker/compare/api_precision_standard.yaml +132 -132
- msprobe/pytorch/api_accuracy_checker/compare/api_precision_threshold.yaml +390 -390
- msprobe/pytorch/api_accuracy_checker/compare/compare.py +386 -345
- msprobe/pytorch/api_accuracy_checker/compare/compare_column.py +73 -73
- msprobe/pytorch/api_accuracy_checker/compare/compare_utils.py +245 -248
- msprobe/pytorch/api_accuracy_checker/config.yaml +10 -4
- msprobe/pytorch/api_accuracy_checker/run_ut/data_generate.py +335 -328
- msprobe/pytorch/api_accuracy_checker/run_ut/multi_run_ut.py +200 -203
- msprobe/pytorch/api_accuracy_checker/run_ut/run_overflow_check.py +133 -127
- msprobe/pytorch/api_accuracy_checker/run_ut/run_ut.py +592 -493
- msprobe/pytorch/api_accuracy_checker/run_ut/run_ut_utils.py +70 -7
- msprobe/pytorch/api_accuracy_checker/run_ut/torch_ut_setting.json +7 -4
- msprobe/pytorch/api_accuracy_checker/tensor_transport_layer/__init__.py +0 -0
- msprobe/pytorch/api_accuracy_checker/tensor_transport_layer/attl.py +197 -0
- msprobe/pytorch/api_accuracy_checker/tensor_transport_layer/client.py +325 -0
- msprobe/pytorch/api_accuracy_checker/tensor_transport_layer/device_dispatch.py +204 -0
- msprobe/pytorch/api_accuracy_checker/tensor_transport_layer/server.py +219 -0
- msprobe/pytorch/api_accuracy_checker/tensor_transport_layer/ssl_config.py +10 -0
- msprobe/pytorch/bench_functions/__init__.py +15 -0
- msprobe/pytorch/bench_functions/apply_adam_w.py +28 -0
- msprobe/pytorch/bench_functions/confusion_transpose.py +19 -0
- msprobe/pytorch/bench_functions/fast_gelu.py +55 -0
- msprobe/pytorch/bench_functions/layer_norm_eval.py +6 -0
- msprobe/pytorch/bench_functions/linear.py +12 -0
- msprobe/pytorch/bench_functions/matmul_backward.py +48 -0
- msprobe/pytorch/bench_functions/npu_fusion_attention.py +509 -0
- msprobe/pytorch/bench_functions/rms_norm.py +15 -0
- msprobe/pytorch/bench_functions/rotary_mul.py +52 -0
- msprobe/pytorch/bench_functions/scaled_mask_softmax.py +26 -0
- msprobe/pytorch/bench_functions/swiglu.py +55 -0
- msprobe/pytorch/common/__init__.py +2 -2
- msprobe/pytorch/common/compare_script.template +14 -14
- msprobe/pytorch/common/log.py +20 -31
- msprobe/pytorch/common/parse_json.py +39 -37
- msprobe/pytorch/common/utils.py +305 -224
- msprobe/pytorch/compare/distributed_compare.py +66 -111
- msprobe/pytorch/compare/mapping.yaml +607 -607
- msprobe/pytorch/compare/match.py +34 -36
- msprobe/pytorch/compare/pt_compare.py +50 -0
- msprobe/pytorch/debugger/debugger_config.py +95 -86
- msprobe/pytorch/debugger/precision_debugger.py +125 -95
- msprobe/pytorch/free_benchmark/__init__.py +8 -8
- msprobe/pytorch/free_benchmark/common/constant.py +70 -67
- msprobe/pytorch/free_benchmark/common/counter.py +71 -71
- msprobe/pytorch/free_benchmark/common/enums.py +37 -37
- msprobe/pytorch/free_benchmark/common/params.py +129 -129
- msprobe/pytorch/free_benchmark/common/utils.py +102 -98
- msprobe/pytorch/free_benchmark/compare/grad_saver.py +179 -183
- msprobe/pytorch/free_benchmark/compare/single_benchmark.py +104 -104
- msprobe/pytorch/free_benchmark/main.py +105 -102
- msprobe/pytorch/free_benchmark/perturbed_layers/base_layer.py +13 -13
- msprobe/pytorch/free_benchmark/perturbed_layers/layer_factory.py +41 -41
- msprobe/pytorch/free_benchmark/perturbed_layers/npu/add_noise.py +90 -90
- msprobe/pytorch/free_benchmark/perturbed_layers/npu/bit_noise.py +104 -104
- msprobe/pytorch/free_benchmark/perturbed_layers/npu/change_value.py +63 -63
- msprobe/pytorch/free_benchmark/perturbed_layers/npu/improve_precision.py +68 -68
- msprobe/pytorch/free_benchmark/perturbed_layers/npu/no_change.py +28 -28
- msprobe/pytorch/free_benchmark/perturbed_layers/npu/npu_base_layser.py +45 -45
- msprobe/pytorch/free_benchmark/perturbed_layers/run_cpu.py +19 -19
- msprobe/pytorch/free_benchmark/result_handlers/base_handler.py +217 -203
- msprobe/pytorch/free_benchmark/result_handlers/check_handler.py +39 -39
- msprobe/pytorch/free_benchmark/result_handlers/fix_handler.py +23 -23
- msprobe/pytorch/free_benchmark/result_handlers/handler_factory.py +30 -31
- msprobe/pytorch/free_benchmark/result_handlers/preheat_handler.py +170 -170
- msprobe/pytorch/function_factory.py +76 -0
- msprobe/pytorch/functional/dump_module.py +39 -39
- msprobe/pytorch/grad_probe/__init__.py +0 -0
- msprobe/pytorch/grad_probe/grad_monitor.py +91 -0
- msprobe/pytorch/grad_probe/grad_stat_csv.py +129 -0
- msprobe/pytorch/hook_module/api_registry.py +161 -161
- msprobe/pytorch/hook_module/hook_module.py +120 -109
- msprobe/pytorch/hook_module/support_wrap_ops.yaml +1879 -1876
- msprobe/pytorch/hook_module/utils.py +30 -29
- msprobe/pytorch/hook_module/wrap_aten.py +110 -100
- msprobe/pytorch/hook_module/wrap_distributed.py +78 -75
- msprobe/pytorch/hook_module/wrap_functional.py +105 -108
- msprobe/pytorch/hook_module/wrap_npu_custom.py +93 -73
- msprobe/pytorch/hook_module/wrap_tensor.py +71 -72
- msprobe/pytorch/hook_module/wrap_torch.py +86 -88
- msprobe/pytorch/hook_module/wrap_vf.py +62 -64
- msprobe/pytorch/module_processer.py +138 -98
- msprobe/pytorch/online_dispatch/__init__.py +20 -20
- msprobe/pytorch/online_dispatch/compare.py +236 -236
- msprobe/pytorch/online_dispatch/dispatch.py +271 -273
- msprobe/pytorch/online_dispatch/dump_compare.py +155 -186
- msprobe/pytorch/online_dispatch/single_compare.py +391 -391
- msprobe/pytorch/online_dispatch/torch_ops_config.yaml +49 -49
- msprobe/pytorch/online_dispatch/utils.py +130 -187
- msprobe/pytorch/parse.py +4 -4
- msprobe/pytorch/parse_tool/cli.py +32 -32
- msprobe/pytorch/parse_tool/lib/compare.py +260 -259
- msprobe/pytorch/parse_tool/lib/config.py +52 -51
- msprobe/pytorch/parse_tool/lib/file_desc.py +31 -31
- msprobe/pytorch/parse_tool/lib/interactive_cli.py +102 -102
- msprobe/pytorch/parse_tool/lib/parse_exception.py +54 -54
- msprobe/pytorch/parse_tool/lib/parse_tool.py +158 -158
- msprobe/pytorch/parse_tool/lib/utils.py +316 -367
- msprobe/pytorch/parse_tool/lib/visualization.py +85 -90
- msprobe/pytorch/pt_config.py +188 -93
- msprobe/pytorch/service.py +246 -167
- mindstudio_probe-1.0.1.dist-info/RECORD +0 -228
- msprobe/config/README.md +0 -397
- msprobe/mindspore/doc/dump.md +0 -65
- msprobe/mindspore/dump/api_kbk_dump.py +0 -55
- msprobe/pytorch/compare/acc_compare.py +0 -1024
- msprobe/pytorch/compare/highlight.py +0 -100
- msprobe/pytorch/doc/FAQ.md +0 -193
- msprobe/pytorch/doc/api_accuracy_checker.md +0 -269
- msprobe/pytorch/doc/atat/321/207/342/226/223/342/225/233/321/205/342/225/221/320/266/321/205/342/225/226/320/265/321/205/320/225/342/225/226/321/206/320/245/342/226/221/321/206/320/235/320/276dump/321/206/320/260/320/227/321/205/320/227/320/226/321/206/320/220/320/267/321/210/320/223/342/225/234/321/205/320/257/342/225/221/321/207/342/225/221/342/224/220/321/206/320/232/320/265/321/205/320/241/320/232.md +0 -182
- msprobe/pytorch/doc/dump.md +0 -207
- msprobe/pytorch/doc/ptdbg_ascend_compare.md +0 -176
- msprobe/pytorch/doc/ptdbg_ascend_overview.md +0 -68
- msprobe/pytorch/doc/ptdbg_ascend_quickstart.md +0 -381
- msprobe/pytorch/doc/run_overflow_check.md +0 -25
- msprobe/pytorch/doc//321/205/320/254/320/270/321/207/342/225/221/342/224/220/321/207/342/226/223/342/225/233/321/205/342/225/221/320/266/321/206/320/277/320/244/321/205/320/277/342/225/243.md +0 -90
- msprobe/test/core_ut/common/test_utils.py +0 -345
- msprobe/test/core_ut/data_dump/test_data_collector.py +0 -47
- msprobe/test/core_ut/data_dump/test_json_writer.py +0 -183
- msprobe/test/core_ut/data_dump/test_scope.py +0 -151
- msprobe/test/core_ut/test_common_config.py +0 -152
- msprobe/test/core_ut/test_file_check.py +0 -218
- msprobe/test/core_ut/test_log.py +0 -109
- msprobe/test/mindspore_ut/test_api_kbk_dump.py +0 -51
- msprobe/test/mindspore_ut/test_debugger_config.py +0 -42
- msprobe/test/mindspore_ut/test_dump_tool_factory.py +0 -51
- msprobe/test/mindspore_ut/test_kernel_graph_dump.py +0 -66
- msprobe/test/mindspore_ut/test_kernel_graph_overflow_check.py +0 -63
- msprobe/test/mindspore_ut/test_ms_config.py +0 -69
- msprobe/test/mindspore_ut/test_overflow_check_tool_factory.py +0 -51
- msprobe/test/mindspore_ut/test_precision_debugger.py +0 -56
- msprobe/test/mindspore_ut/test_task_handler_factory.py +0 -58
- msprobe/test/pytorch_ut/advisor/test_advisor.py +0 -83
- msprobe/test/pytorch_ut/api_accuracy_checker/common/test_common_utils.py +0 -108
- msprobe/test/pytorch_ut/api_accuracy_checker/common/test_config.py +0 -39
- msprobe/test/pytorch_ut/api_accuracy_checker/compare/test_algorithm.py +0 -112
- msprobe/test/pytorch_ut/api_accuracy_checker/compare/test_api_precision_compare.py +0 -77
- msprobe/test/pytorch_ut/api_accuracy_checker/compare/test_compare.py +0 -125
- msprobe/test/pytorch_ut/api_accuracy_checker/compare/test_compare_column.py +0 -10
- msprobe/test/pytorch_ut/api_accuracy_checker/compare/test_compare_utils.py +0 -43
- msprobe/test/pytorch_ut/api_accuracy_checker/run_ut/dump.json +0 -179
- msprobe/test/pytorch_ut/api_accuracy_checker/run_ut/forward.json +0 -63
- msprobe/test/pytorch_ut/api_accuracy_checker/run_ut/test_data_generate.py +0 -99
- msprobe/test/pytorch_ut/api_accuracy_checker/run_ut/test_multi_run_ut.py +0 -115
- msprobe/test/pytorch_ut/api_accuracy_checker/run_ut/test_run_ut.py +0 -72
- msprobe/test/pytorch_ut/compare/test_acc_compare.py +0 -17
- msprobe/test/pytorch_ut/free_benchmark/perturbed_layers/test_perturbed_layser.py +0 -105
- msprobe/test/pytorch_ut/free_benchmark/result_handlers/test_result_handler.py +0 -121
- msprobe/test/pytorch_ut/free_benchmark/test_main.py +0 -101
- msprobe/test/pytorch_ut/functional/test_dump_module.py +0 -15
- msprobe/test/pytorch_ut/hook_module/test_api_registry.py +0 -130
- msprobe/test/pytorch_ut/hook_module/test_hook_module.py +0 -42
- msprobe/test/pytorch_ut/hook_module/test_wrap_aten.py +0 -65
- msprobe/test/pytorch_ut/hook_module/test_wrap_distributed.py +0 -35
- msprobe/test/pytorch_ut/hook_module/test_wrap_functional.py +0 -20
- msprobe/test/pytorch_ut/hook_module/test_wrap_tensor.py +0 -35
- msprobe/test/pytorch_ut/hook_module/test_wrap_torch.py +0 -43
- msprobe/test/pytorch_ut/hook_module/test_wrap_vf.py +0 -11
- msprobe/test/pytorch_ut/test_pt_config.py +0 -69
- msprobe/test/pytorch_ut/test_service.py +0 -59
- msprobe/test/resources/advisor.txt +0 -3
- msprobe/test/resources/compare_result_20230703104808.csv +0 -9
- msprobe/test/resources/compare_result_without_accuracy.csv +0 -9
- msprobe/test/resources/config.yaml +0 -3
- msprobe/test/resources/npu_test.pkl +0 -8
- msprobe/test/run_test.sh +0 -30
- msprobe/test/run_ut.py +0 -58
- msprobe/test/test_module_processer.py +0 -64
- {mindstudio_probe-1.0.1.dist-info → mindstudio_probe-1.0.4.dist-info}/top_level.txt +0 -0
- /msprobe/{pytorch/doc → docs}/img/BLOOM-7B_1.png +0 -0
- /msprobe/{pytorch/doc → docs}/img/BLOOM-7B_2.png +0 -0
- /msprobe/{pytorch/doc → docs}/img/BLOOM-7B_3.png +0 -0
- /msprobe/{pytorch/doc → docs}/img/BLOOM-7B_4.png +0 -0
- /msprobe/{pytorch/doc → docs}/img/GPT-3_1.png +0 -0
- /msprobe/{pytorch/doc → docs}/img/GPT-3_2.png +0 -0
- /msprobe/{pytorch/doc → docs}/img/GPT-3_3.png +0 -0
- /msprobe/{pytorch/doc → docs}/img/GPT-3_4.png +0 -0
- /msprobe/{pytorch/doc → docs}/img/GPT-3_5.png +0 -0
- /msprobe/{pytorch/doc → docs}/img/GPT-3_6.png +0 -0
- /msprobe/{pytorch/doc → docs}/img/GPT-3_7.png +0 -0
- /msprobe/{pytorch/doc → docs}/img/GPT-3_8.png +0 -0
- /msprobe/{pytorch/doc → docs}/img/YOLOV5S_1.png +0 -0
- /msprobe/{pytorch/doc → docs}/img/YOLOV5S_2.png +0 -0
- /msprobe/{pytorch/doc → docs}/img/accuracy_checking_details.png +0 -0
- /msprobe/{pytorch/doc → docs}/img/accuracy_checking_result.png +0 -0
- /msprobe/{pytorch/doc → docs}/img/api_precision_compare_details.png +0 -0
- /msprobe/{pytorch/doc → docs}/img/api_precision_compare_result.png +0 -0
- /msprobe/{pytorch/doc → docs}/img/auto_analyze_log.png +0 -0
- /msprobe/{pytorch/doc → docs}/img/compare_result_pkl.png +0 -0
- /msprobe/{pytorch/doc → docs}/img/compare_result_pkl_md5.png.png +0 -0
- /msprobe/{pytorch/doc → docs}/img/cpu_info.png +0 -0
- /msprobe/{config → docs}/img/free_benchmark.png +0 -0
- /msprobe/{pytorch/doc → docs}/img/module_compare.png +0 -0
|
@@ -0,0 +1,378 @@
|
|
|
1
|
+
# Copyright 2024 Huawei Technologies Co., Ltd
|
|
2
|
+
#
|
|
3
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4
|
+
# you may not use this file except in compliance with the License.
|
|
5
|
+
# You may obtain a copy of the License at
|
|
6
|
+
#
|
|
7
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
8
|
+
#
|
|
9
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
10
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12
|
+
# See the License for the specific language governing permissions and
|
|
13
|
+
# limitations under the License.
|
|
14
|
+
# ============================================================================
|
|
15
|
+
|
|
16
|
+
import os
|
|
17
|
+
import copy
|
|
18
|
+
import functools
|
|
19
|
+
from collections import defaultdict
|
|
20
|
+
|
|
21
|
+
import mindspore as ms
|
|
22
|
+
from mindspore.common.tensor import Tensor
|
|
23
|
+
from mindspore import ops
|
|
24
|
+
from mindspore import nn
|
|
25
|
+
try:
|
|
26
|
+
from mindspore.common._pijit_context import PIJitCaptureContext
|
|
27
|
+
pijit_label = True
|
|
28
|
+
except ImportError:
|
|
29
|
+
pijit_label = False
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
from msprobe.core.data_dump.data_collector import build_data_collector
|
|
33
|
+
from msprobe.core.data_dump.scope import BaseScope
|
|
34
|
+
from msprobe.mindspore.common.utils import get_rank_if_initialized
|
|
35
|
+
from msprobe.core.common.file_utils import create_directory
|
|
36
|
+
from msprobe.mindspore.common.log import logger
|
|
37
|
+
from msprobe.core.common.utils import Const
|
|
38
|
+
from msprobe.core.common.exceptions import DistributedNotInitializedError
|
|
39
|
+
from msprobe.mindspore.dump.hook_cell.api_registry import api_register
|
|
40
|
+
from msprobe.core.data_dump.data_processor.base import ModuleBackwardInputsOutputs, ModuleForwardInputsOutputs, \
|
|
41
|
+
ModuleBackwardInputs, ModuleBackwardOutputs
|
|
42
|
+
from msprobe.core.common.exceptions import MsprobeException
|
|
43
|
+
from msprobe.mindspore.dump.hook_cell.hook_cell import HOOKCell
|
|
44
|
+
from msprobe.mindspore.cell_processor import CellProcessor
|
|
45
|
+
from msprobe.mindspore.dump.jit_dump import JitDump
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
class Service:
|
|
49
|
+
def __init__(self, config):
|
|
50
|
+
self.model = None
|
|
51
|
+
self.config = copy.deepcopy(config)
|
|
52
|
+
self.config.level = self.config.level_ori
|
|
53
|
+
self.data_collector = build_data_collector(self.config)
|
|
54
|
+
self.cell_processor = CellProcessor(self.data_collector.scope)
|
|
55
|
+
self.switch = False
|
|
56
|
+
self.current_iter = 0
|
|
57
|
+
self.first_start = True
|
|
58
|
+
self.current_rank = None
|
|
59
|
+
self.primitive_counters = {}
|
|
60
|
+
self.dump_iter_dir = None
|
|
61
|
+
self.start_call = False
|
|
62
|
+
self.check_level_valid()
|
|
63
|
+
self.should_stop_service = False
|
|
64
|
+
|
|
65
|
+
@staticmethod
|
|
66
|
+
def check_model_valid(model):
|
|
67
|
+
if not model or isinstance(model, nn.Cell):
|
|
68
|
+
return model
|
|
69
|
+
raise MsprobeException(
|
|
70
|
+
MsprobeException.INVALID_PARAM_ERROR, "model 参数必须是 mindspore.nn.Cell 类型。"
|
|
71
|
+
)
|
|
72
|
+
|
|
73
|
+
def check_level_valid(self):
|
|
74
|
+
if self.config.level == "L2":
|
|
75
|
+
raise MsprobeException(
|
|
76
|
+
MsprobeException.INVALID_PARAM_ERROR, "L2 level dump function is currently not supported."
|
|
77
|
+
)
|
|
78
|
+
|
|
79
|
+
def build_hook(self, target_type, name):
|
|
80
|
+
def forward_hook(api_or_cell_name, cell, input, output):
|
|
81
|
+
if not self.should_excute_hook():
|
|
82
|
+
return None
|
|
83
|
+
|
|
84
|
+
if target_type == BaseScope.Module_Type_Module:
|
|
85
|
+
api_or_cell_name = cell.mindstudio_reserved_name
|
|
86
|
+
module_input_output = ModuleForwardInputsOutputs(args=input, kwargs={}, output=output)
|
|
87
|
+
else:
|
|
88
|
+
module_input_output = ModuleForwardInputsOutputs(args=input, kwargs=cell.input_kwargs,
|
|
89
|
+
output=output)
|
|
90
|
+
|
|
91
|
+
self.data_collector.update_api_or_module_name(api_or_cell_name)
|
|
92
|
+
self.data_collector.forward_data_collect(api_or_cell_name, cell, pid, module_input_output)
|
|
93
|
+
if self.data_collector.if_return_forward_new_output():
|
|
94
|
+
return self.data_collector.get_forward_new_output()
|
|
95
|
+
if target_type == BaseScope.Module_Type_API:
|
|
96
|
+
del cell.input_kwargs
|
|
97
|
+
return output
|
|
98
|
+
|
|
99
|
+
def backward_hook(api_or_cell_name, cell, grad_input, grad_output):
|
|
100
|
+
if not self.should_excute_hook():
|
|
101
|
+
return
|
|
102
|
+
|
|
103
|
+
if target_type == BaseScope.Module_Type_Module:
|
|
104
|
+
api_or_cell_name = cell.mindstudio_reserved_name
|
|
105
|
+
self.data_collector.update_api_or_module_name(api_or_cell_name)
|
|
106
|
+
if self.data_collector:
|
|
107
|
+
# 框架最新接口变更,grad_input和grad_output的含义发生了变化,与torch含义保持一致,因此此处调换顺序传入
|
|
108
|
+
module_input_output = ModuleBackwardInputsOutputs(grad_input=grad_output, grad_output=grad_input)
|
|
109
|
+
self.data_collector.backward_data_collect(api_or_cell_name, cell, pid, module_input_output)
|
|
110
|
+
|
|
111
|
+
pid = os.getpid()
|
|
112
|
+
forward_name_template = name + Const.FORWARD
|
|
113
|
+
backward_name_template = name + Const.BACKWARD
|
|
114
|
+
forward_hook = functools.partial(forward_hook, forward_name_template)
|
|
115
|
+
backward_hook = functools.partial(backward_hook, backward_name_template)
|
|
116
|
+
|
|
117
|
+
def wrap_forward_hook(cell, input, output):
|
|
118
|
+
return forward_hook(cell, input, output)
|
|
119
|
+
|
|
120
|
+
def wrap_backward_hook(cell, grad_input, grad_output):
|
|
121
|
+
return backward_hook(cell, grad_input, grad_output)
|
|
122
|
+
|
|
123
|
+
return wrap_forward_hook, wrap_backward_hook
|
|
124
|
+
|
|
125
|
+
    def wrap_primitive(self, origin_func, primitive_name):
        """Wrap a primitive's ``__call__`` so each invocation dumps its data.

        Args:
            origin_func: the primitive's original ``__call__``.
            primitive_name: name used to derive the per-call dump identifier.

        Returns:
            A replacement ``__call__`` that hooks inputs/outputs for backward
            gradient capture and collects forward data when the service switch
            is on.
        """
        # Captured so the nested closures can reach the service regardless of
        # what `self` means at call time (the primitive instance is `instance_self`).
        service_instance = self

        def create_backward_hook(captured_grads, num_tensors, updated_primitive_name, hook_type):
            # Returns a gradient hook that accumulates grads into
            # `captured_grads` and flushes them to the data collector once all
            # `num_tensors` gradients for this side (input/output) arrived.
            def backward_hook(grad):
                captured_grads.append(grad)
                backward_primitive_name = f"{updated_primitive_name}.{Const.BACKWARD}"
                try:
                    if len(captured_grads) == num_tensors and hook_type == Const.INPUT:
                        # All input-side grads collected: record them as the
                        # backward *outputs* (torch-style naming).
                        service_instance.data_collector.update_api_or_module_name(backward_primitive_name)
                        new_module_input_output = ModuleBackwardOutputs(grad_output=tuple(captured_grads))
                        service_instance.data_collector.backward_output_data_collect(
                            backward_primitive_name, service_instance, os.getpid(), new_module_input_output
                        )
                        captured_grads.clear()
                    elif len(captured_grads) == num_tensors and hook_type == Const.OUTPUT:
                        # All output-side grads collected: record them as the
                        # backward *inputs*.
                        service_instance.data_collector.update_api_or_module_name(backward_primitive_name)
                        new_module_input_output = ModuleBackwardInputs(grad_input=tuple(captured_grads))
                        service_instance.data_collector.backward_input_data_collect(
                            backward_primitive_name, service_instance, os.getpid(), new_module_input_output
                        )
                        captured_grads.clear()

                except Exception as exception:
                    # Re-raise with context so the failing primitive is identifiable.
                    raise Exception(f"This is a primitive op {hook_type}_backward dump error: {exception},"
                                    f" updated_primitive_name: {updated_primitive_name}") from exception

            return backward_hook

        def hook_primitive_inputs(args, captured_grads_input, updated_primitive_name):
            # Attach a shared backward hook to every Tensor argument;
            # non-tensor arguments pass through untouched.
            hooked_inputs = []
            num_tensors = sum(isinstance(arg, Tensor) for arg in args)
            input_backward_hook = create_backward_hook(captured_grads_input, num_tensors, updated_primitive_name,
                                                       Const.INPUT)
            for _, arg in enumerate(args):
                if isinstance(arg, Tensor):
                    arg_hooked = ops.HookBackward(input_backward_hook)(arg)
                    hooked_inputs.append(arg_hooked)
                else:
                    hooked_inputs.append(arg)
            return hooked_inputs

        def hook_primitive_outputs(out, captured_grads_output, updated_primitive_name):
            # Mirror of hook_primitive_inputs for the primitive's result;
            # handles a bare Tensor, a tuple of outputs, or anything else
            # (returned unchanged).
            if isinstance(out, tuple):
                num_output_tensors = sum(isinstance(tensor, Tensor) for tensor in out)
            else:
                num_output_tensors = 1
            output_backward_hook = create_backward_hook(captured_grads_output, num_output_tensors,
                                                        updated_primitive_name, Const.OUTPUT)

            if isinstance(out, Tensor):
                return ops.HookBackward(output_backward_hook)(out)
            elif isinstance(out, tuple):
                hooked_outputs = []
                for tensor in out:
                    if isinstance(tensor, Tensor):
                        hooked_outputs.append(ops.HookBackward(output_backward_hook)(tensor))
                    else:
                        hooked_outputs.append(tensor)
                return tuple(hooked_outputs)
            return out

        def wrapped_primitive_call(instance_self, *args, **kwargs):
            # Every call gets a unique name: <prefix>.<primitive>.<call index>.
            service_instance.update_primitive_counters(primitive_name)
            current_count = service_instance.primitive_counters.get(primitive_name, 0)
            updated_primitive_name = f"{Const.PRIMITIVE_PREFIX}.{primitive_name}.{current_count}"

            # Fast path: dumping disabled, run the original primitive as-is.
            if not service_instance.switch:
                return origin_func(*args, **kwargs)

            captured_grads_input, captured_grads_output = [], []

            try:
                hooked_inputs = hook_primitive_inputs(args, captured_grads_input, updated_primitive_name)
            except Exception as exception:
                raise Exception("This is a primitive op dump error during input hooking: {},"
                                " primitive_name: {}".format(exception, primitive_name)) from exception

            try:
                out = origin_func(*hooked_inputs, **kwargs)
            except Exception as exception:
                raise Exception("This is a primitive op dump error during function call: {},"
                                " primitive_name: {}".format(exception, primitive_name)) from exception

            forward_primitive_name = f"{updated_primitive_name}.{Const.FORWARD}"
            service_instance.data_collector.update_api_or_module_name(forward_primitive_name)
            if service_instance.data_collector:
                module_input_output = ModuleForwardInputsOutputs(args=hooked_inputs, kwargs=kwargs, output=out)
                try:
                    service_instance.data_collector.forward_data_collect(forward_primitive_name, instance_self,
                                                                         os.getpid(), module_input_output)
                except Exception as exception:
                    raise Exception("This is a primitive op dump error during forward data collection: {},"
                                    " primitive_name: {}".format(exception, primitive_name)) from exception

                # The collector may replace the forward output (e.g. for
                # perturbation-style tasks); honor the substituted value.
                if service_instance.data_collector.if_return_forward_new_output():
                    out = service_instance.data_collector.get_forward_new_output()

            try:
                out = hook_primitive_outputs(out, captured_grads_output, updated_primitive_name)
            except Exception as exception:
                raise Exception("This is a primitive op dump error during output hooking: {},"
                                " primitive_name: {}".format(exception, primitive_name)) from exception

            return out

        return wrapped_primitive_call
|
|
232
|
+
|
|
233
|
+
def update_primitive_counters(self, primitive_name):
|
|
234
|
+
if primitive_name not in self.primitive_counters:
|
|
235
|
+
self.primitive_counters[primitive_name] = 0
|
|
236
|
+
else:
|
|
237
|
+
self.primitive_counters[primitive_name] += 1
|
|
238
|
+
|
|
239
|
+
def register_hooks(self):
    """Wrap every primitive op found in the model so its ``__call__`` dumps data.

    Walks all cells of ``self.model``, deduplicates the (name, primitive)
    pairs, then replaces each primitive's class with a dynamically created
    subclass whose ``__call__`` is the wrapped version.
    """
    # Deduplicate primitives shared across cells before patching.
    unique_primitives = {
        (pname, primitive)
        for _, cell in self.model.cells_and_names()
        for pname, primitive in cell._primitives.items()
    }
    for pname, primitive in unique_primitives:
        wrapped_call = self.wrap_primitive(primitive.__call__, pname)
        patched_cls = type('NewPrimitive', (primitive.__class__,),
                           {'__call__': wrapped_call})
        # Swap the instance's class so the wrapped __call__ takes effect.
        primitive.__class__ = patched_cls
|
|
250
|
+
def step(self):
    """Advance to the next dump iteration and reset per-step bookkeeping."""
    next_iter = self.current_iter + 1
    self.current_iter = next_iter
    self.data_collector.update_iter(next_iter)
    # Per-step counters start fresh every iteration.
    HOOKCell.cell_count = defaultdict(int)
    CellProcessor.cell_count = {}
    self.primitive_counters.clear()
|
|
257
|
+
def start(self, model=None):
    """Turn the dump switch on for the current step.

    Args:
        model: optional network to hook; validated by ``self.check_model_valid``.
            May be None when only API-level (L1) dumping is configured.
    """
    self.start_call = True
    # Service was already permanently ended by an earlier call.
    if self.should_stop_service:
        return
    # All configured steps are done (or the data processor terminated):
    # restore the original APIs and shut the service down for good.
    if self.need_end_service():
        api_register.api_set_ori_func()
        self.should_stop_service = True
        self.switch = False
        logger.info("************************************************")
        logger.info(f"* {Const.TOOL_NAME} ends successfully. *")
        logger.info("************************************************")
        return
    # Skip steps that are not in the configured dump list.
    if self.config.step and self.current_iter not in self.config.step:
        return
    self.model = self.check_model_valid(model)

    logger.info(f"{Const.TOOL_NAME}: debugger.start() is set successfully")

    # One-time initialisation on the first effective start() call.
    if self.first_start:
        try:
            self.current_rank = get_rank_if_initialized()
        except DistributedNotInitializedError:
            # Non-distributed run: no rank information available.
            self.current_rank = None

        # Only the configured ranks register hooks and dump data.
        if self.config.rank and self.current_rank not in self.config.rank:
            return
        self.register_hook_new()
        if self.config.level == "L1":
            # Route jit executor/grad entry points through JitDump so
            # jit-compiled code is captured too — NOTE(review): patches
            # MindSpore private internals; verify against the supported
            # MindSpore version.
            JitDump.set_config(self.config)
            JitDump.set_data_collector(self.data_collector)
            ms.common.api._MindsporeFunctionExecutor = JitDump
            ms.common.api._PyNativeExecutor.grad = JitDump.grad
            if pijit_label:
                # Neutralise PIJit capture context while dumping is active.
                PIJitCaptureContext.__enter__ = self.empty
                PIJitCaptureContext.__exit__ = self.empty
        self.first_start = False

    self.switch = True
    logger.info(f"Dump switch is turned on at step {self.current_iter}. ")
    self.create_dirs()
    logger.info(f"Dump data will be saved in {self.dump_iter_dir}.")
|
+
|
|
299
|
+
def stop(self):
    """Turn the dump switch off and flush collected data to json.

    Raises:
        Exception: if ``start()`` was never called in the current scope.
    """
    if self.should_stop_service:
        return
    logger.info(f"{Const.TOOL_NAME}: debugger.stop() is set successfully. "
                "Please set debugger.start() to turn on the dump switch again. ")
    if not self.start_call:
        logger.error(f"{Const.TOOL_NAME}: debugger.start() is not set in the current scope.")
        raise Exception("debugger.start() is not set in the current scope.")
    # Nothing was dumped for this step/rank, so there is nothing to flush.
    step_skipped = self.config.step and self.current_iter not in self.config.step
    rank_skipped = self.config.rank and self.current_rank not in self.config.rank
    if step_skipped or rank_skipped:
        return
    self.switch = False
    self.start_call = False
    self.data_collector.write_json()
|
|
315
|
+
def need_end_service(self):
    """Return True when the dump service should shut down permanently.

    That is the case once the current iteration has passed the last
    configured step, or the data processor reports termination.
    """
    configured_steps = self.config.step
    if configured_steps and self.current_iter > max(configured_steps):
        return True
    collector = self.data_collector
    return bool(collector and collector.data_processor.is_terminated)
321
|
+
|
|
322
|
+
def should_excute_hook(self):
    """Return True when hooks should actually collect data.

    Requires the switch to be on, a data collector to exist, and its
    processor to not have terminated.
    """
    collector = self.data_collector
    enabled = self.switch and collector and not collector.data_processor.is_terminated
    return bool(enabled)
+
|
|
329
|
+
def create_dirs(self):
    """Create the per-step/per-rank dump directory tree and register its paths.

    Layout: ``<dump_path>/step<N>/rank<R>/`` plus a ``dump_tensor_data``
    sub-directory when the configured task needs raw tensor data.
    """
    create_directory(self.config.dump_path)
    step_dir = os.path.join(self.config.dump_path, f"step{self.current_iter}")
    self.dump_iter_dir = step_dir
    # An unknown rank (non-distributed run) yields a bare "rank" directory.
    rank_tag = '' if self.current_rank is None else self.current_rank
    rank_dir = os.path.join(step_dir, f"rank{rank_tag}")
    create_directory(rank_dir)
    tensor_dir = None
    if self.config.task in self.data_collector.tasks_need_tensor_data:
        tensor_dir = os.path.join(rank_dir, "dump_tensor_data")
        create_directory(tensor_dir)

    self.data_collector.update_dump_paths(
        os.path.join(rank_dir, "dump.json"),
        os.path.join(rank_dir, "stack.json"),
        os.path.join(rank_dir, "construct.json"),
        tensor_dir,
        None)
346
|
+
|
|
347
|
+
def empty(self, *args, **kwargs):
    """No-op placeholder; accepts and ignores any arguments.

    Used to neutralise context-manager entry/exit hooks while dumping.
    """
    return None
|
+
|
|
350
|
+
def register_hook_new(self):
    """Mount dump hooks on the model according to the configured level.

    L1 hooks the framework API layer (plus model primitives when a model
    is available); L0 hooks every sub-cell of the model and therefore
    requires ``self.model`` to be set.

    Raises:
        MsprobeException: when level is L0 and no model was provided.
    """
    logger.info("The {} hook function is successfully mounted to the model.".format(self.config.task))
    if self.config.level == "L1":
        # API-level dump: wrap framework APIs via the build_hook factory.
        api_register.initialize_hook(functools.partial(self.build_hook, BaseScope.Module_Type_API))
        api_register.api_set_hook_func()
        if self.model:
            # Also wrap the primitive ops found in the model.
            self.register_hooks()

    if self.config.level == "L0":
        if not self.model:
            raise MsprobeException(MsprobeException.INVALID_PARAM_ERROR,
                                   "The current level is L0, the model cannot be None")
        for name, cell in self.model.cells_and_names():
            # Skip the root cell; only sub-cells are hooked.
            if cell == self.model:
                continue
            prefix = 'Cell' + Const.SEP + name + Const.SEP + \
                     cell.__class__.__name__ + Const.SEP
            # Data-collection hooks for the forward and backward passes.
            forward_hook, backward_hook = self.build_hook(BaseScope.Module_Type_Module, prefix)
            cell.register_forward_hook(forward_hook)
            cell.register_backward_hook(backward_hook)

            # Construct-graph bookkeeping hooks marking start/stop of each phase.
            cell.register_forward_pre_hook(
                self.cell_processor.node_hook(prefix + Const.FORWARD, Const.START))
            cell.register_forward_hook(
                self.cell_processor.node_hook(prefix + Const.FORWARD, Const.STOP))
            cell.register_backward_pre_hook(
                self.cell_processor.node_hook(prefix + Const.BACKWARD, Const.START))
            cell.register_backward_hook(
                self.cell_processor.node_hook(prefix + Const.BACKWARD, Const.STOP))
|
@@ -1,21 +1,24 @@
|
|
|
1
|
-
from msprobe.
|
|
2
|
-
from msprobe.mindspore.
|
|
3
|
-
from msprobe.mindspore.
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
if not
|
|
20
|
-
raise Exception("
|
|
21
|
-
|
|
1
|
+
from msprobe.core.common.const import Const
|
|
2
|
+
from msprobe.mindspore.debugger.debugger_config import DebuggerConfig
|
|
3
|
+
from msprobe.mindspore.dump.dump_tool_factory import DumpToolFactory
|
|
4
|
+
from msprobe.mindspore.overflow_check.overflow_check_tool_factory import OverflowCheckToolFactory
|
|
5
|
+
from msprobe.mindspore.free_benchmark.self_check_tool_factory import SelfCheckToolFactory
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
class TaskHandlerFactory:
    """Dispatch a DebuggerConfig to the factory able to build its task handler."""

    # Maps a task name to the factory class that builds its handler.
    tasks = {
        Const.TENSOR: DumpToolFactory,
        Const.STATISTICS: DumpToolFactory,
        Const.OVERFLOW_CHECK: OverflowCheckToolFactory,
        Const.FREE_BENCHMARK: SelfCheckToolFactory
    }

    @staticmethod
    def create(config: DebuggerConfig):
        """Build and return the handler for ``config.task``.

        Raises:
            Exception: when the task is unknown or the factory yields no handler.
        """
        factory = TaskHandlerFactory.tasks.get(config.task)
        if not factory:
            raise Exception("Valid task is needed.")
        handler = factory.create(config)
        if not handler:
            raise Exception("Can not find task handler")
        return handler
|
msprobe/msprobe.py
CHANGED
|
@@ -1,67 +1,105 @@
|
|
|
1
|
-
# Copyright (c) 2024, Huawei Technologies Co., Ltd.
|
|
2
|
-
# All rights reserved.
|
|
3
|
-
#
|
|
4
|
-
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
5
|
-
# you may not use this file except in compliance with the License.
|
|
6
|
-
# You may obtain a copy of the License at
|
|
7
|
-
#
|
|
8
|
-
# http://www.apache.org/licenses/LICENSE-2.0
|
|
9
|
-
#
|
|
10
|
-
# Unless required by applicable law or agreed to in writing, software
|
|
11
|
-
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
12
|
-
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
13
|
-
# See the License for the specific language governing permissions and
|
|
14
|
-
# limitations under the License.
|
|
15
|
-
|
|
16
|
-
import argparse
|
|
17
|
-
import sys
|
|
18
|
-
|
|
19
|
-
from msprobe.
|
|
20
|
-
from msprobe.
|
|
21
|
-
from msprobe.
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
multi_run_ut_cmd_parser.
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
|
|
1
|
+
# Copyright (c) 2024, Huawei Technologies Co., Ltd.
|
|
2
|
+
# All rights reserved.
|
|
3
|
+
#
|
|
4
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
5
|
+
# you may not use this file except in compliance with the License.
|
|
6
|
+
# You may obtain a copy of the License at
|
|
7
|
+
#
|
|
8
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
9
|
+
#
|
|
10
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
11
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
12
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
13
|
+
# See the License for the specific language governing permissions and
|
|
14
|
+
# limitations under the License.
|
|
15
|
+
|
|
16
|
+
import argparse
|
|
17
|
+
import sys
|
|
18
|
+
import importlib.util
|
|
19
|
+
from msprobe.core.compare.utils import _compare_parser
|
|
20
|
+
from msprobe.core.common.log import logger
|
|
21
|
+
from msprobe.core.compare.compare_cli import compare_cli
|
|
22
|
+
from msprobe.core.common.const import Const
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
def is_module_available(module_name):
    """Return True when *module_name* can be imported in this environment."""
    return importlib.util.find_spec(module_name) is not None
|
28
|
+
|
|
29
|
+
|
|
30
|
+
def main():
    """Entry point of the msprobe command line tool.

    Builds the top-level argument parser, registers sub-commands, wires in
    framework-specific parsers depending on which frameworks are installed,
    then dispatches the selected sub-command.

    Fix: dispatch previously read raw ``sys.argv[2]``/``sys.argv[3]``,
    which misdispatched for forms like ``--framework=pytorch`` and raised
    IndexError when no sub-command was supplied. Dispatch now uses the
    parsed ``args.framework`` and a subparser ``dest`` instead.
    """
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description="msprobe(mindstudio probe), [Powered by MindStudio].\n"
                    "Providing one-site accuracy difference debugging toolkit for training on Ascend Devices.\n"
                    "For any issue, refer README.md first",
    )

    parser.set_defaults(print_help=parser.print_help)
    parser.add_argument('-f', '--framework', required=True, choices=[Const.PT_FRAMEWORK, Const.MS_FRAMEWORK],
                        help='Deep learning framework.')
    # dest='subcommand' records which sub-command was chosen so dispatch can
    # rely on parsed arguments instead of positional sys.argv indexes.
    subparsers = parser.add_subparsers(dest='subcommand')
    subparsers.add_parser('parse')
    compare_cmd_parser = subparsers.add_parser('compare')
    run_ut_cmd_parser = subparsers.add_parser('run_ut')
    multi_run_ut_cmd_parser = subparsers.add_parser('multi_run_ut')
    api_precision_compare_cmd_parser = subparsers.add_parser('api_precision_compare')
    run_overflow_check_cmd_parser = subparsers.add_parser('run_overflow_check')
    _compare_parser(compare_cmd_parser)
    is_torch_available = is_module_available("torch")
    is_mindspore_available = is_module_available("mindspore")
    if is_torch_available:
        # PyTorch toolchain imports are deferred so msprobe stays usable
        # when torch is not installed.
        from msprobe.pytorch.api_accuracy_checker.run_ut.run_ut import _run_ut_parser, run_ut_command
        from msprobe.pytorch.parse_tool.cli import parse as cli_parse
        from msprobe.pytorch.api_accuracy_checker.run_ut.multi_run_ut import prepare_config, run_parallel_ut
        from msprobe.pytorch.api_accuracy_checker.compare.api_precision_compare import _api_precision_compare_parser, \
            _api_precision_compare_command
        from msprobe.pytorch.api_accuracy_checker.run_ut.run_overflow_check import _run_overflow_check_parser, \
            _run_overflow_check_command

        _run_ut_parser(run_ut_cmd_parser)
        _run_ut_parser(multi_run_ut_cmd_parser)
        multi_run_ut_cmd_parser.add_argument('-n', '--num_splits', type=int, choices=range(1, 65), default=8,
                                             help='Number of splits for parallel processing. Range: 1-64')
        _api_precision_compare_parser(api_precision_compare_cmd_parser)
        _run_overflow_check_parser(run_overflow_check_cmd_parser)
    elif is_mindspore_available:
        # NOTE(review): when both torch and mindspore are installed, run_ut
        # keeps the PyTorch argument set even for -f mindspore — confirm
        # this is intended.
        from msprobe.mindspore.api_accuracy_checker.cmd_parser import add_api_accuracy_checker_argument
        add_api_accuracy_checker_argument(run_ut_cmd_parser)

    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(0)
    args = parser.parse_args(sys.argv[1:])
    subcommand = getattr(args, 'subcommand', None)
    if args.framework == Const.PT_FRAMEWORK:
        if not is_torch_available:
            logger.error("PyTorch does not exist, please install PyTorch library")
            raise Exception("PyTorch does not exist, please install PyTorch library")
        if subcommand == "run_ut":
            run_ut_command(args)
        elif subcommand == "parse":
            cli_parse()
        elif subcommand == "multi_run_ut":
            config = prepare_config(args)
            run_parallel_ut(config)
        elif subcommand == "api_precision_compare":
            _api_precision_compare_command(args)
        elif subcommand == "run_overflow_check":
            _run_overflow_check_command(args)
        elif subcommand == "compare":
            # Cell/api mapping arguments only make sense for MindSpore compares.
            if args.cell_mapping is not None or args.api_mapping is not None:
                logger.error("Argument -cm or -am is not supported in PyTorch framework")
                raise Exception("Argument -cm or -am is not supported in PyTorch framework")
            compare_cli(args)
    else:
        if not is_module_available(Const.MS_FRAMEWORK):
            logger.error("MindSpore does not exist, please install MindSpore library")
            raise Exception("MindSpore does not exist, please install MindSpore library")
        if subcommand == "compare":
            compare_cli(args)
        elif subcommand == "run_ut":
            from msprobe.mindspore.api_accuracy_checker.main import api_checker_main
            api_checker_main(args)
|
103
|
+
|
|
104
|
+
# Script entry point: run the msprobe CLI when executed directly.
if __name__ == "__main__":
    main()
|
msprobe/pytorch/__init__.py
CHANGED
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
from .debugger.precision_debugger import PrecisionDebugger
|
|
2
|
-
from .common.utils import seed_all
|
|
3
|
-
from .compare.
|
|
4
|
-
from .compare.
|
|
1
|
+
from .debugger.precision_debugger import PrecisionDebugger
|
|
2
|
+
from .common.utils import seed_all
|
|
3
|
+
from .compare.distributed_compare import compare_distributed
|
|
4
|
+
from .compare.pt_compare import compare
|