mindspore 2.4.1__cp310-cp310-win_amd64.whl → 2.5.0__cp310-cp310-win_amd64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of mindspore has been flagged as potentially problematic; see the registry's advisory page for details.
- mindspore/.commit_id +1 -1
- mindspore/__init__.py +8 -3
- mindspore/_c_dataengine.cp310-win_amd64.pyd +0 -0
- mindspore/_c_expression.cp310-win_amd64.pyd +0 -0
- mindspore/_c_mindrecord.cp310-win_amd64.pyd +0 -0
- mindspore/_checkparam.py +0 -5
- mindspore/_extends/parallel_compile/akg_compiler/gen_custom_op_files.py +1 -1
- mindspore/_extends/parse/compile_config.py +64 -0
- mindspore/_extends/parse/deprecated/__init__.py +0 -0
- mindspore/_extends/parse/deprecated/deprecated_tensor_method.py +375 -0
- mindspore/_extends/parse/parser.py +23 -5
- mindspore/_extends/parse/standard_method.py +123 -27
- mindspore/_extends/pijit/pijit_func_white_list.py +1 -1
- mindspore/amp.py +7 -1
- mindspore/avcodec-59.dll +0 -0
- mindspore/avdevice-59.dll +0 -0
- mindspore/avfilter-8.dll +0 -0
- mindspore/avformat-59.dll +0 -0
- mindspore/avutil-57.dll +0 -0
- mindspore/boost/boost_cell_wrapper.py +136 -41
- mindspore/common/__init__.py +3 -1
- mindspore/common/_register_for_tensor.py +0 -1
- mindspore/common/_stub_tensor.py +25 -4
- mindspore/common/_tensor_cpp_method.py +17 -0
- mindspore/common/_tensor_docs.py +6132 -0
- mindspore/common/api.py +99 -25
- mindspore/common/dtype.py +34 -34
- mindspore/common/dump.py +2 -1
- mindspore/common/file_system.py +8 -1
- mindspore/common/generator.py +2 -0
- mindspore/common/hook_handle.py +3 -1
- mindspore/common/initializer.py +3 -4
- mindspore/common/lazy_inline.py +8 -2
- mindspore/common/mindir_util.py +10 -2
- mindspore/common/parameter.py +30 -27
- mindspore/common/tensor.py +713 -1337
- mindspore/communication/__init__.py +1 -1
- mindspore/communication/_comm_helper.py +10 -0
- mindspore/communication/comm_func.py +215 -173
- mindspore/communication/management.py +23 -20
- mindspore/context.py +292 -193
- mindspore/dataset/__init__.py +23 -19
- mindspore/dataset/callback/ds_callback.py +2 -1
- mindspore/dataset/core/config.py +84 -3
- mindspore/dataset/engine/cache_admin.py +3 -3
- mindspore/dataset/engine/cache_client.py +5 -4
- mindspore/dataset/engine/datasets.py +192 -149
- mindspore/dataset/engine/datasets_audio.py +14 -0
- mindspore/dataset/engine/datasets_standard_format.py +28 -11
- mindspore/dataset/engine/datasets_text.py +38 -1
- mindspore/dataset/engine/datasets_user_defined.py +125 -65
- mindspore/dataset/engine/datasets_vision.py +81 -8
- mindspore/dataset/engine/iterators.py +281 -63
- mindspore/dataset/engine/obs/util.py +8 -0
- mindspore/dataset/engine/queue.py +40 -0
- mindspore/dataset/engine/samplers.py +26 -2
- mindspore/dataset/engine/serializer_deserializer.py +1 -1
- mindspore/dataset/engine/validators.py +43 -11
- mindspore/dataset/transforms/py_transforms_util.py +17 -0
- mindspore/dataset/transforms/transforms.py +29 -12
- mindspore/dataset/vision/validators.py +1 -2
- mindspore/device_context/__init__.py +21 -0
- mindspore/device_context/ascend/__init__.py +25 -0
- mindspore/device_context/ascend/device.py +72 -0
- mindspore/device_context/ascend/op_debug.py +94 -0
- mindspore/device_context/ascend/op_precision.py +193 -0
- mindspore/device_context/ascend/op_tuning.py +127 -0
- mindspore/device_context/cpu/__init__.py +25 -0
- mindspore/device_context/cpu/device.py +62 -0
- mindspore/device_context/cpu/op_tuning.py +43 -0
- mindspore/device_context/gpu/__init__.py +21 -0
- mindspore/device_context/gpu/device.py +70 -0
- mindspore/device_context/gpu/op_precision.py +67 -0
- mindspore/device_context/gpu/op_tuning.py +175 -0
- mindspore/device_manager.py +134 -0
- mindspore/dnnl.dll +0 -0
- mindspore/experimental/llm_boost/__init__.py +3 -2
- mindspore/experimental/llm_boost/ascend_native/__init__.py +22 -0
- mindspore/experimental/llm_boost/ascend_native/llama_boost_ascend_native.py +211 -0
- mindspore/experimental/llm_boost/ascend_native/llm_boost.py +52 -0
- mindspore/experimental/llm_boost/atb/boost_base.py +239 -64
- mindspore/experimental/llm_boost/atb/llama_boost.py +52 -30
- mindspore/experimental/llm_boost/atb/qwen_boost.py +47 -24
- mindspore/experimental/llm_boost/register.py +1 -0
- mindspore/experimental/optim/adadelta.py +26 -22
- mindspore/experimental/optim/adam.py +3 -0
- mindspore/experimental/optim/lr_scheduler.py +33 -24
- mindspore/experimental/optim/radam.py +33 -30
- mindspore/hal/device.py +28 -0
- mindspore/hal/event.py +17 -0
- mindspore/hal/memory.py +94 -3
- mindspore/hal/stream.py +91 -6
- mindspore/include/api/context.h +1 -2
- mindspore/include/dataset/constants.h +2 -2
- mindspore/jpeg62.dll +0 -0
- mindspore/log.py +12 -0
- mindspore/mindrecord/__init__.py +1 -1
- mindspore/mindrecord/config.py +17 -316
- mindspore/mindrecord/filereader.py +1 -9
- mindspore/mindrecord/filewriter.py +5 -15
- mindspore/mindrecord/mindpage.py +1 -9
- mindspore/mindspore_backend.dll +0 -0
- mindspore/mindspore_common.dll +0 -0
- mindspore/mindspore_core.dll +0 -0
- mindspore/mindspore_glog.dll +0 -0
- mindspore/mindspore_ops.dll +0 -0
- mindspore/mint/__init__.py +824 -218
- mindspore/mint/distributed/__init__.py +66 -4
- mindspore/mint/distributed/distributed.py +2594 -44
- mindspore/mint/linalg/__init__.py +6 -0
- mindspore/mint/nn/__init__.py +473 -14
- mindspore/mint/nn/functional.py +486 -11
- mindspore/mint/nn/layer/__init__.py +17 -4
- mindspore/mint/nn/layer/_functions.py +330 -0
- mindspore/mint/nn/layer/activation.py +169 -1
- mindspore/mint/nn/layer/basic.py +123 -0
- mindspore/mint/nn/layer/conv.py +727 -0
- mindspore/mint/nn/layer/normalization.py +215 -19
- mindspore/mint/nn/layer/padding.py +797 -0
- mindspore/mint/nn/layer/pooling.py +170 -0
- mindspore/mint/optim/__init__.py +2 -1
- mindspore/mint/optim/adam.py +223 -0
- mindspore/mint/optim/adamw.py +26 -19
- mindspore/mint/special/__init__.py +2 -1
- mindspore/multiprocessing/__init__.py +5 -0
- mindspore/nn/__init__.py +2 -0
- mindspore/nn/cell.py +142 -21
- mindspore/nn/dynamic_lr.py +2 -1
- mindspore/nn/layer/activation.py +6 -6
- mindspore/nn/layer/basic.py +35 -25
- mindspore/nn/layer/channel_shuffle.py +3 -3
- mindspore/nn/layer/conv.py +3 -0
- mindspore/nn/layer/embedding.py +3 -3
- mindspore/nn/layer/normalization.py +8 -7
- mindspore/nn/layer/padding.py +4 -3
- mindspore/nn/layer/pooling.py +55 -23
- mindspore/nn/layer/rnn_cells.py +1 -1
- mindspore/nn/layer/rnns.py +2 -1
- mindspore/nn/layer/timedistributed.py +5 -5
- mindspore/nn/layer/transformer.py +48 -26
- mindspore/nn/learning_rate_schedule.py +5 -3
- mindspore/nn/loss/loss.py +31 -36
- mindspore/nn/optim/ada_grad.py +1 -0
- mindspore/nn/optim/adadelta.py +2 -2
- mindspore/nn/optim/adam.py +1 -1
- mindspore/nn/optim/lars.py +1 -4
- mindspore/nn/optim/optimizer.py +1 -1
- mindspore/nn/optim/rprop.py +2 -2
- mindspore/nn/optim/thor.py +2 -1
- mindspore/nn/utils/__init__.py +22 -0
- mindspore/nn/utils/init.py +73 -0
- mindspore/nn/wrap/cell_wrapper.py +4 -6
- mindspore/nn/wrap/loss_scale.py +3 -4
- mindspore/numpy/array_creations.py +60 -62
- mindspore/numpy/array_ops.py +148 -143
- mindspore/numpy/logic_ops.py +41 -42
- mindspore/numpy/math_ops.py +361 -359
- mindspore/numpy/utils.py +16 -16
- mindspore/numpy/utils_const.py +4 -4
- mindspore/opencv_core452.dll +0 -0
- mindspore/opencv_imgcodecs452.dll +0 -0
- mindspore/opencv_imgproc452.dll +0 -0
- mindspore/ops/__init__.py +2 -1
- mindspore/ops/_grad_experimental/grad_comm_ops.py +107 -8
- mindspore/ops/_grad_experimental/grad_debug_ops.py +6 -1
- mindspore/ops/_grad_experimental/grad_inner_ops.py +9 -0
- mindspore/ops/_grad_experimental/grad_math_ops.py +2 -1
- mindspore/ops/_op_impl/cpu/__init__.py +1 -0
- mindspore/ops/_op_impl/cpu/raise_op.py +28 -0
- mindspore/ops/_vmap/vmap_array_ops.py +20 -19
- mindspore/ops/_vmap/vmap_base.py +0 -2
- mindspore/ops/_vmap/vmap_grad_nn_ops.py +19 -13
- mindspore/ops/_vmap/vmap_math_ops.py +11 -9
- mindspore/ops/_vmap/vmap_nn_ops.py +20 -34
- mindspore/ops/auto_generate/cpp_create_prim_instance_helper.py +149 -12
- mindspore/ops/auto_generate/gen_arg_handler.py +0 -61
- mindspore/ops/auto_generate/gen_extend_func.py +554 -60
- mindspore/ops/auto_generate/gen_ops_def.py +1621 -115
- mindspore/ops/auto_generate/gen_ops_prim.py +8027 -3411
- mindspore/ops/auto_generate/pyboost_inner_prim.py +183 -79
- mindspore/ops/composite/base.py +1 -1
- mindspore/ops/composite/multitype_ops/_compile_utils.py +229 -30
- mindspore/ops/composite/multitype_ops/pow_impl.py +0 -29
- mindspore/ops/function/__init__.py +12 -0
- mindspore/ops/function/array_func.py +561 -159
- mindspore/ops/function/clip_func.py +64 -0
- mindspore/ops/function/debug_func.py +28 -20
- mindspore/ops/function/image_func.py +1 -1
- mindspore/ops/function/linalg_func.py +5 -4
- mindspore/ops/function/math_func.py +1664 -294
- mindspore/ops/function/nn_func.py +988 -317
- mindspore/ops/function/parameter_func.py +3 -56
- mindspore/ops/function/random_func.py +243 -33
- mindspore/ops/function/sparse_unary_func.py +1 -1
- mindspore/ops/functional.py +18 -5
- mindspore/ops/functional_overload.py +897 -0
- mindspore/ops/operations/__init__.py +3 -2
- mindspore/ops/operations/_embedding_cache_ops.py +4 -4
- mindspore/ops/operations/_grad_ops.py +2 -34
- mindspore/ops/operations/_infer_ops.py +2 -1
- mindspore/ops/operations/_inner_ops.py +38 -8
- mindspore/ops/operations/array_ops.py +45 -303
- mindspore/ops/operations/comm_ops.py +23 -17
- mindspore/ops/operations/custom_ops.py +7 -49
- mindspore/ops/operations/debug_ops.py +42 -47
- mindspore/ops/operations/inner_ops.py +6 -4
- mindspore/ops/operations/linalg_ops.py +3 -2
- mindspore/ops/operations/manually_defined/ops_def.py +185 -104
- mindspore/ops/operations/math_ops.py +11 -216
- mindspore/ops/operations/nn_ops.py +153 -310
- mindspore/ops/primitive.py +23 -21
- mindspore/ops/tensor_method.py +1669 -0
- mindspore/ops_generate/aclnn_kernel_register_auto_cc_generator.py +110 -0
- mindspore/ops_generate/add_tensor_docs_generator.py +54 -0
- mindspore/ops_generate/arg_handler.py +0 -61
- mindspore/ops_generate/auto_grad_impl_cc_generator.py +135 -0
- mindspore/ops_generate/auto_grad_reg_cc_generator.py +93 -0
- mindspore/ops_generate/base_generator.py +11 -0
- mindspore/ops_generate/cpp_create_prim_instance_helper_generator.py +108 -0
- mindspore/ops_generate/functional_map_cpp_generator.py +491 -0
- mindspore/ops_generate/functional_overload_py_generator.py +110 -0
- mindspore/ops_generate/functions_cc_generator.py +233 -0
- mindspore/ops_generate/gen_aclnn_implement.py +110 -114
- mindspore/ops_generate/gen_constants.py +157 -3
- mindspore/ops_generate/gen_ops.py +245 -990
- mindspore/ops_generate/gen_pyboost_func.py +97 -998
- mindspore/ops_generate/gen_utils.py +119 -33
- mindspore/ops_generate/lite_ops_cpp_generator.py +155 -0
- mindspore/ops_generate/op_api_proto.py +206 -0
- mindspore/ops_generate/op_def_py_generator.py +131 -0
- mindspore/ops_generate/op_prim_py_generator.py +480 -0
- mindspore/ops_generate/op_proto.py +373 -108
- mindspore/ops_generate/op_template_parser.py +436 -0
- mindspore/ops_generate/ops_def_cc_generator.py +288 -0
- mindspore/ops_generate/ops_def_h_generator.py +74 -0
- mindspore/ops_generate/ops_name_h_generator.py +68 -0
- mindspore/ops_generate/ops_primitive_h_generator.py +81 -0
- mindspore/ops_generate/pyboost_functions_cpp_generator.py +370 -0
- mindspore/ops_generate/pyboost_functions_h_generator.py +68 -0
- mindspore/ops_generate/pyboost_functions_py_generator.py +148 -0
- mindspore/ops_generate/pyboost_grad_function_cpp_generator.py +154 -0
- mindspore/ops_generate/pyboost_inner_prim_generator.py +131 -0
- mindspore/ops_generate/pyboost_native_grad_functions_generator.py +268 -0
- mindspore/ops_generate/pyboost_op_cpp_code_generator.py +851 -0
- mindspore/ops_generate/pyboost_overload_functions_cpp_generator.py +344 -0
- mindspore/ops_generate/pyboost_utils.py +92 -33
- mindspore/ops_generate/template.py +294 -44
- mindspore/ops_generate/tensor_func_reg_cpp_generator.py +422 -0
- mindspore/parallel/__init__.py +3 -3
- mindspore/parallel/_auto_parallel_context.py +44 -34
- mindspore/parallel/_cell_wrapper.py +22 -3
- mindspore/parallel/_parallel_serialization.py +13 -2
- mindspore/parallel/_utils.py +4 -2
- mindspore/parallel/algo_parameter_config.py +1 -1
- mindspore/parallel/checkpoint_transform.py +44 -0
- mindspore/parallel/cluster/process_entity/_api.py +131 -37
- mindspore/parallel/cluster/process_entity/_utils.py +41 -6
- mindspore/parallel/cluster/run.py +20 -3
- mindspore/parallel/parameter_broadcast.py +1 -1
- mindspore/parallel/shard.py +3 -0
- mindspore/parallel/transform_safetensors.py +119 -253
- mindspore/profiler/__init__.py +17 -4
- mindspore/profiler/analysis/__init__.py +0 -0
- mindspore/profiler/analysis/parser/__init__.py +0 -0
- mindspore/profiler/analysis/parser/ascend_cann_parser.py +166 -0
- mindspore/profiler/analysis/parser/base_parser.py +158 -0
- mindspore/profiler/analysis/parser/framework_cann_relation_parser.py +45 -0
- mindspore/profiler/analysis/parser/ms_framework_parser.py +142 -0
- mindspore/profiler/analysis/parser/ms_minddata_parser.py +145 -0
- mindspore/profiler/analysis/parser/timeline_assembly_factory/__init__.py +0 -0
- mindspore/profiler/analysis/parser/timeline_assembly_factory/ascend_timeline_assembler.py +261 -0
- mindspore/profiler/analysis/parser/timeline_assembly_factory/base_timeline_assembler.py +40 -0
- mindspore/profiler/analysis/parser/timeline_assembly_factory/trace_view_container.py +84 -0
- mindspore/profiler/analysis/parser/timeline_creator/__init__.py +0 -0
- mindspore/profiler/analysis/parser/timeline_creator/base_timeline_creator.py +44 -0
- mindspore/profiler/analysis/parser/timeline_creator/cpu_op_timeline_creator.py +90 -0
- mindspore/profiler/analysis/parser/timeline_creator/fwk_timeline_creator.py +76 -0
- mindspore/profiler/analysis/parser/timeline_creator/msprof_timeline_creator.py +103 -0
- mindspore/profiler/analysis/parser/timeline_creator/scope_layer_timeline_creator.py +134 -0
- mindspore/profiler/analysis/parser/timeline_event/__init__.py +0 -0
- mindspore/profiler/analysis/parser/timeline_event/base_event.py +233 -0
- mindspore/profiler/analysis/parser/timeline_event/cpu_op_event.py +47 -0
- mindspore/profiler/analysis/parser/timeline_event/flow_event.py +36 -0
- mindspore/profiler/analysis/parser/timeline_event/fwk_event.py +260 -0
- mindspore/profiler/analysis/parser/timeline_event/msprof_event.py +73 -0
- mindspore/profiler/analysis/parser/timeline_event/scope_layer_event.py +53 -0
- mindspore/profiler/analysis/parser/timeline_event/timeline_event_pool.py +146 -0
- mindspore/profiler/analysis/task_manager.py +131 -0
- mindspore/profiler/analysis/time_converter.py +84 -0
- mindspore/profiler/analysis/viewer/__init__.py +0 -0
- mindspore/profiler/analysis/viewer/ascend_communication_viewer.py +333 -0
- mindspore/profiler/analysis/viewer/ascend_integrate_viewer.py +87 -0
- mindspore/profiler/analysis/viewer/ascend_kernel_details_viewer.py +252 -0
- mindspore/profiler/analysis/viewer/ascend_memory_viewer.py +313 -0
- mindspore/profiler/analysis/viewer/ascend_op_memory_viewer.py +322 -0
- mindspore/profiler/analysis/viewer/ascend_step_trace_time_viewer.py +265 -0
- mindspore/profiler/analysis/viewer/ascend_timeline_viewer.py +58 -0
- mindspore/profiler/analysis/viewer/base_viewer.py +26 -0
- mindspore/profiler/analysis/viewer/ms_dataset_viewer.py +97 -0
- mindspore/profiler/analysis/viewer/ms_minddata_viewer.py +581 -0
- mindspore/profiler/analysis/work_flow.py +73 -0
- mindspore/profiler/common/ascend_msprof_exporter.py +138 -0
- mindspore/profiler/common/command_executor.py +90 -0
- mindspore/profiler/common/constant.py +174 -3
- mindspore/profiler/common/file_manager.py +208 -0
- mindspore/profiler/common/log.py +130 -0
- mindspore/profiler/common/msprof_cmd_tool.py +202 -0
- mindspore/profiler/common/path_manager.py +371 -0
- mindspore/profiler/common/process_bar.py +168 -0
- mindspore/profiler/common/process_pool.py +9 -3
- mindspore/profiler/common/profiler_context.py +476 -0
- mindspore/profiler/common/profiler_info.py +304 -0
- mindspore/profiler/common/profiler_output_path.py +284 -0
- mindspore/profiler/common/profiler_parameters.py +210 -0
- mindspore/profiler/common/profiler_path_manager.py +120 -0
- mindspore/profiler/common/record_function.py +76 -0
- mindspore/profiler/common/tlv_decoder.py +76 -0
- mindspore/profiler/common/util.py +75 -2
- mindspore/profiler/dynamic_profiler.py +270 -37
- mindspore/profiler/envprofiler.py +138 -0
- mindspore/profiler/mstx.py +199 -0
- mindspore/profiler/platform/__init__.py +21 -0
- mindspore/profiler/platform/base_profiler.py +40 -0
- mindspore/profiler/platform/cpu_profiler.py +124 -0
- mindspore/profiler/platform/gpu_profiler.py +74 -0
- mindspore/profiler/platform/npu_profiler.py +309 -0
- mindspore/profiler/profiler.py +580 -93
- mindspore/profiler/profiler_action_controller.py +187 -0
- mindspore/profiler/profiler_interface.py +114 -0
- mindspore/profiler/schedule.py +208 -0
- mindspore/rewrite/api/symbol_tree.py +1 -2
- mindspore/run_check/_check_version.py +18 -13
- mindspore/runtime/__init__.py +37 -0
- mindspore/runtime/device.py +27 -0
- mindspore/runtime/event.py +209 -0
- mindspore/runtime/executor.py +148 -0
- mindspore/runtime/memory.py +392 -0
- mindspore/runtime/stream.py +460 -0
- mindspore/runtime/thread_bind_core.py +401 -0
- mindspore/swresample-4.dll +0 -0
- mindspore/swscale-6.dll +0 -0
- mindspore/tinyxml2.dll +0 -0
- mindspore/train/__init__.py +2 -2
- mindspore/train/_utils.py +53 -18
- mindspore/train/amp.py +8 -4
- mindspore/train/callback/_checkpoint.py +32 -18
- mindspore/train/callback/_early_stop.py +1 -1
- mindspore/train/callback/_flops_collector.py +105 -69
- mindspore/train/callback/_history.py +1 -1
- mindspore/train/callback/_summary_collector.py +44 -6
- mindspore/train/callback/_tft_register.py +37 -15
- mindspore/train/dataset_helper.py +11 -11
- mindspore/train/metrics/precision.py +4 -5
- mindspore/train/mind_ir_pb2.py +167 -46
- mindspore/train/model.py +13 -14
- mindspore/train/serialization.py +461 -72
- mindspore/train/summary/summary_record.py +1 -2
- mindspore/train/train_thor/model_thor.py +1 -1
- mindspore/turbojpeg.dll +0 -0
- mindspore/utils/__init__.py +4 -2
- mindspore/utils/dryrun.py +138 -0
- mindspore/utils/runtime_execution_order_check.py +550 -0
- mindspore/version.py +1 -1
- {mindspore-2.4.1.dist-info → mindspore-2.5.0.dist-info}/METADATA +3 -4
- {mindspore-2.4.1.dist-info → mindspore-2.5.0.dist-info}/RECORD +368 -242
- {mindspore-2.4.1.dist-info → mindspore-2.5.0.dist-info}/entry_points.txt +1 -1
- mindspore/common/_tensor_overload.py +0 -139
- mindspore/mindspore_np_dtype.dll +0 -0
- mindspore/profiler/envprofiling.py +0 -254
- mindspore/profiler/profiling.py +0 -1926
- {mindspore-2.4.1.dist-info → mindspore-2.5.0.dist-info}/WHEEL +0 -0
- {mindspore-2.4.1.dist-info → mindspore-2.5.0.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,261 @@
|
|
|
1
|
+
# Copyright 2024 Huawei Technologies Co., Ltd
|
|
2
|
+
#
|
|
3
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4
|
+
# you may not use this file except in compliance with the License.
|
|
5
|
+
# You may obtain a copy of the License at
|
|
6
|
+
#
|
|
7
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
8
|
+
#
|
|
9
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
10
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12
|
+
# See the License for the specific language governing permissions and
|
|
13
|
+
# limitations under the License.
|
|
14
|
+
# ============================================================================
|
|
15
|
+
"""Timeline assembler for Ascend device."""
|
|
16
|
+
from typing import List, Dict, Any
|
|
17
|
+
from decimal import Decimal
|
|
18
|
+
from collections import defaultdict
|
|
19
|
+
|
|
20
|
+
from mindspore import context
|
|
21
|
+
from mindspore import log as logger
|
|
22
|
+
from mindspore.profiler.common.constant import EventConstant, TimelineLayerName, ProfilerLevel
|
|
23
|
+
from mindspore.profiler.analysis.parser.timeline_event.base_event import BaseEvent
|
|
24
|
+
from mindspore.profiler.analysis.parser.timeline_event.timeline_event_pool import TimelineEventPool
|
|
25
|
+
from mindspore.profiler.analysis.parser.timeline_event.flow_event import FlowStartEvent, FlowEndEvent
|
|
26
|
+
from mindspore.profiler.analysis.parser.timeline_creator.fwk_timeline_creator import FwkTimelineCreator
|
|
27
|
+
from mindspore.profiler.analysis.parser.timeline_creator.cpu_op_timeline_creator import CpuOpTimelineCreator
|
|
28
|
+
from mindspore.profiler.analysis.parser.timeline_creator.msprof_timeline_creator import MsprofTimelineCreator
|
|
29
|
+
from mindspore.profiler.analysis.parser.timeline_assembly_factory.base_timeline_assembler import BaseTimelineAssembler
|
|
30
|
+
from mindspore.profiler.analysis.parser.timeline_creator.scope_layer_timeline_creator import (
|
|
31
|
+
ScopeLayerTimelineCreator,
|
|
32
|
+
is_scope_data
|
|
33
|
+
)
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
class AscendTimelineAssembler(BaseTimelineAssembler):
    """Assembler for Ascend device timeline.

    Merges MindSpore framework events, CPU operator events and msprof
    (CANN/Ascend Hardware) events into a single chrome-trace view, then
    links the layers together with flow events and, where scope
    information is present, scope-layer events.
    """

    def __init__(self, **kwargs):
        """Initialize the assembler.

        Keyword Args:
            profiler_level: Profiling level; ``ProfilerLevel.LevelNone``
                disables the framework-to-hardware flow creation.
            context_mode: MindSpore execution mode; only consulted to decide
                whether a thread-count mismatch warning is emitted
                (``context.PYNATIVE_MODE``).
        """
        super().__init__()
        self._profiler_level = kwargs.get("profiler_level")
        self._context_mode = kwargs.get("context_mode")
        self._init_creators()

    def _init_creators(self):
        """Initialize the per-layer trace creators."""
        self._fwk_creator = FwkTimelineCreator()
        self._cpu_op_creator = CpuOpTimelineCreator()
        self._msprof_creator = MsprofTimelineCreator()
        self._scope_layer_creator = ScopeLayerTimelineCreator()

    def assemble(self, data: Dict[str, Any]) -> None:
        """Assemble Ascend timeline from input data.

        Args:
            data (Dict[str, Any]): Raw profiling data; recognized keys are
                ``mindspore_op_list``, ``cpu_op_lines`` and ``msprof_timeline``.
        """
        # Order matters: flow assembly sets hardware events' ``parent`` links,
        # and the scope-layer step below reads ``event.parent``.
        self._assemble_basic_events(data)
        self._assemble_flow_events()
        self._assemble_scope_layer_events()

    def _assemble_basic_events(self, data: Dict[str, Any]) -> None:
        """Create and add basic events from input data.

        Missing keys default to empty lists, so each layer is optional.
        """
        self._assemble_events(self._fwk_creator, data.get("mindspore_op_list", []))
        self._assemble_events(self._cpu_op_creator, data.get("cpu_op_lines", []))
        self._assemble_events(self._msprof_creator, data.get("msprof_timeline", []))

    def _assemble_scope_layer_events(self) -> None:
        """Create scope layer events.

        Gathers scope-carrying events from the CPU OP pool and the Ascend
        Hardware pool and feeds them to the scope-layer creator. Hardware
        events may inherit scope through their framework ``parent`` set by
        :meth:`_create_fwk_to_hardware_flow`.
        """
        scope_data = []

        # Get CPU OP scope data if available
        cpu_op_pool = self.trace_view_container.get_pool_by_name(TimelineLayerName.CPU_OP.value)
        if cpu_op_pool:
            for event in cpu_op_pool.get_complete_events():
                if is_scope_data(event):
                    scope_data.append(event)

        # Get Ascend Hardware scope data
        hardware_pool = self.trace_view_container.get_pool_by_name(TimelineLayerName.ASCEND_HARDWARE.value)
        if hardware_pool:
            for event in hardware_pool.get_complete_events():
                # A hardware event qualifies directly or via its framework parent.
                if is_scope_data(event) or is_scope_data(event.parent):
                    scope_data.append(event)

        if scope_data:
            self._assemble_events(self._scope_layer_creator, scope_data)

    def _assemble_events(self, creator, data) -> None:
        """Create events using creator and add to container.

        Runs the creator over ``data``, registers every resulting event pool
        with the shared container, and appends the creator's chrome-trace
        output to the final trace view.
        """
        creator.create(data)
        for pool in creator.get_event_pools().values():
            self.trace_view_container.add_event_pool(pool)
        self.trace_view_container.add_trace_events(creator.get_chrome_trace_data())

    def _assemble_flow_events(self) -> None:
        """Create and add flow events between timelines.

        Emits framework-to-framework flows, framework-to-mstx flows, and —
        unless the profiler level is ``LevelNone`` — framework-to-hardware
        flows. Requires the MindSpore framework pool to be present.
        """
        fwk_pool = self.trace_view_container.get_pool_by_name(TimelineLayerName.MINDSPORE.value)
        if not fwk_pool:
            # No framework layer: nothing to connect flows from.
            return

        # Create and add fwk to fwk flows
        fwk_to_fwk_flows = self._create_fwk_to_fwk_flow(fwk_pool)
        self.trace_view_container.add_trace_events(fwk_to_fwk_flows)

        # Create and add fwk to mstx flows
        for mstx_name in TimelineLayerName.MSTX.value:
            mstx_pool = self.trace_view_container.get_pool_by_name(mstx_name)
            if mstx_pool:
                fwk_to_mstx_flows = self._create_fwk_to_mstx_flow(mstx_pool, fwk_pool)
                self.trace_view_container.add_trace_events(fwk_to_mstx_flows)

        if self._profiler_level == ProfilerLevel.LevelNone.value:
            # LevelNone collects no hardware detail; skip hardware flows.
            return

        hardware_pool = self.trace_view_container.get_pool_by_name(TimelineLayerName.ASCEND_HARDWARE.value)
        cann_pool = self.trace_view_container.get_pool_by_name(TimelineLayerName.CANN.value)
        if not hardware_pool or not cann_pool:
            return

        # Collect kernel launch events, grouped by framework thread id.
        for event in fwk_pool.get_complete_events():
            if any(keyword in event.name for keyword in EventConstant.KERNEL_LAUNCH_KEYWORDS):
                self.trace_view_container.kernel_launch_op_event[event.tid].append(event)

        # Create and add fwk to hardware flows
        fwk_to_hardware_flows = self._create_fwk_to_hardware_flow()
        self.trace_view_container.add_trace_events(fwk_to_hardware_flows)

    def _create_fwk_to_hardware_flow(self) -> List[Dict]:
        """Create flow events between framework and hardware events.

        Matches, per thread id, each ACL launch time against the framework
        launch operator whose ``[ts, te]`` interval contains it, linking the
        dispatched NPU kernels to that operator.

        Returns:
            List[Dict]: Chrome-trace flow events; empty when either side of
            the connection is missing.
        """
        acl_to_npu_flow_dict = self._msprof_creator.get_acl_to_npu_flow_dict()
        fwk_launch_op_list = self.trace_view_container.kernel_launch_op_event
        if not acl_to_npu_flow_dict:
            logger.error("Cannot find connection between CANN layer and Ascend Hardware layer.")
            return []
        if not fwk_launch_op_list:
            logger.warning("Cannot find launch op in MindSpore framework. Please verify if it's in graph mode.")
            return []
        if (set(acl_to_npu_flow_dict.keys()) != set(fwk_launch_op_list.keys()) and
                self._context_mode == context.PYNATIVE_MODE):
            logger.warning(
                "The number of launch op threads in MindSpore framework is inconsistent with the CANN layer.")

        fwk_to_npu_flows = []
        for tid, cann_to_npu_events in acl_to_npu_flow_dict.items():
            # Two-pointer sweep over the time-sorted launch ops and ACL times.
            fwk_launch_op_sorted = sorted(fwk_launch_op_list.get(tid, []), key=lambda x: x.ts)
            # ACL start times arrive as strings; sort them numerically.
            acl_to_npu_events_sorted = sorted(cann_to_npu_events.items(), key=lambda x: Decimal(x[0]))

            index = 0
            for acl_start_time, device_data_list in acl_to_npu_events_sorted:
                acl_start_time = Decimal(acl_start_time)
                while index < len(fwk_launch_op_sorted):
                    fwk_launch_op = fwk_launch_op_sorted[index]
                    if fwk_launch_op.ts > acl_start_time:
                        # This (and every later) launch op starts after the ACL
                        # call; try the next ACL entry against the same op.
                        break
                    if acl_start_time <= fwk_launch_op.te:
                        # ACL call falls inside this launch op's lifetime:
                        # attach each dispatched NPU kernel and emit a flow.
                        for hardware_event in device_data_list:
                            hardware_event.parent = fwk_launch_op
                            fwk_launch_op.children.append(hardware_event)
                            self.trace_view_container.hardware_op_event[hardware_event.tid].append(hardware_event)
                            fwk_to_npu_flows.extend(
                                self._create_flow_events(
                                    fwk_launch_op,
                                    hardware_event,
                                    EventConstant.MINDSPORE_NPU_FLOW_NAME,
                                    EventConstant.MINDSPORE_NPU_FLOW_CAT
                                )
                            )
                        # Keep ``index`` on this op: it may contain further ACL
                        # calls that also start within its interval.
                        break
                    index += 1

        return fwk_to_npu_flows

    def _create_fwk_to_fwk_flow(self, framework_pool: TimelineEventPool) -> List[Dict]:
        """Create flow events between framework events.

        Pairs each flow id's single start event with its single end event;
        ids with any other multiplicity are logged and skipped.
        """
        fwk_to_fwk_flows = []
        for flow_id, flow_pair in framework_pool.get_start_to_end_flow_pairs().items():
            if len(flow_pair["start"]) != 1 or len(flow_pair["end"]) != 1:
                logger.info(
                    f"Mindspore op flow expected exactly one start and one end event with flow id {flow_id}, "
                    f"but got {len(flow_pair['start'])} start and {len(flow_pair['end'])} end events"
                )
                continue

            start_event = flow_pair["start"][0]
            end_event = flow_pair["end"][0]
            end_event.parent = start_event
            start_event.children.append(end_event)

            fwk_to_fwk_flows.extend(
                self._create_flow_events(
                    start_event,
                    end_event,
                    EventConstant.MINDSPORE_SELF_FLOW_NAME,
                    EventConstant.MINDSPORE_SELF_FLOW_CAT,
                    flow_id
                )
            )

        return fwk_to_fwk_flows

    def _create_fwk_to_mstx_flow(self, mstx_pool: TimelineEventPool, fwk_pool: TimelineEventPool) -> List[Dict]:
        """Create flow events between framework and mstx events.

        Per thread id, matches each mstx event to the framework mstx-API
        event whose ``[ts, te]`` interval contains its start time; each
        framework event is consumed by at most one mstx event.
        """
        fwk_mstx_api_event_group_by_tid = defaultdict(list)
        for event in fwk_pool.get_complete_events():
            if EventConstant.MSTX_KEYWORD in event.name:
                fwk_mstx_api_event_group_by_tid[event.tid].append(event)

        fwk_to_mstx_flows = []
        mstx_event_group_by_tid = mstx_pool.complete_event

        for tid, mstx_event_list in mstx_event_group_by_tid.items():
            # Two-pointer sweep over the two time-sorted sequences.
            sorted_fwk_mstx_api_events = sorted(fwk_mstx_api_event_group_by_tid.get(tid, []), key=lambda x: x.ts)
            sorted_mstx_events = sorted(mstx_event_list, key=lambda x: x.ts)

            index = 0
            for mstx_event in sorted_mstx_events:
                while index < len(sorted_fwk_mstx_api_events):
                    fwk_event = sorted_fwk_mstx_api_events[index]
                    if mstx_event.ts < fwk_event.ts:
                        # mstx event precedes the current framework event; no
                        # earlier framework event remains, try next mstx event.
                        break
                    if mstx_event.ts <= fwk_event.te:
                        # mstx event starts inside the framework API call: link
                        # them, emit a flow, and consume this framework event.
                        mstx_event.parent = fwk_event
                        fwk_event.children.append(mstx_event)
                        fwk_to_mstx_flows.extend(
                            self._create_flow_events(
                                fwk_event,
                                mstx_event,
                                EventConstant.MSTX_FLOW_NAME,
                                EventConstant.MSTX_FLOW_CAT,
                            )
                        )
                        index += 1
                        break
                    index += 1

        return fwk_to_mstx_flows

    @staticmethod
    def _create_flow_events(start_event: BaseEvent, end_event: BaseEvent,
                            name: str, cat: str, flow_id: str = None) -> List[Dict]:
        """Create flow start and end events pair.

        Args:
            start_event (BaseEvent): Event the flow arrow originates from.
            end_event (BaseEvent): Event the flow arrow terminates at.
            name (str): Flow event name.
            cat (str): Flow event category.
            flow_id (str, optional): Flow id; defaults to the end event's
                start timestamp as a string.

        Returns:
            List[Dict]: The start and end flow events in chrome-trace format.
        """
        if flow_id is None:
            flow_id = str(end_event.ts)

        # NOTE(review): "bp": "e" is set on both events; in the chrome trace
        # format the binding point normally applies to the end ("f") event —
        # presumably harmless on the start event, confirm against the format.
        flow_start = FlowStartEvent({
            "name": name,
            "cat": cat,
            "pid": start_event.pid,
            "tid": start_event.tid,
            "ts": start_event.ts,
            "id": flow_id,
            "bp": "e"
        })
        flow_end = FlowEndEvent({
            "name": name,
            "cat": cat,
            "pid": end_event.pid,
            "tid": end_event.tid,
            "ts": end_event.ts,
            "id": flow_id,
            "bp": "e"
        })
        return [flow_start.to_trace_format(), flow_end.to_trace_format()]
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
# Copyright 2024 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
|
|
2
|
+
# You may obtain a copy of the License at
|
|
3
|
+
#
|
|
4
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
5
|
+
#
|
|
6
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
7
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
8
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
9
|
+
# See the License for the specific language governing permissions and
|
|
10
|
+
# limitations under the License.
|
|
11
|
+
# ============================================================================
|
|
12
|
+
"""Base class for timeline assembly process."""
|
|
13
|
+
from abc import ABC, abstractmethod
|
|
14
|
+
from typing import Dict, Any
|
|
15
|
+
|
|
16
|
+
from mindspore.profiler.analysis.parser.timeline_assembly_factory.trace_view_container import TraceViewContainer
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
class BaseTimelineAssembler(ABC):
    """Abstract base for the timeline assembly process.

    Concrete assemblers consume parsed timeline data and populate a shared
    ``TraceViewContainer`` that holds the resulting trace view, coordinating
    the different trace event pools along the way.
    """

    def __init__(self):
        """Create an assembler backed by an empty trace view container."""
        self.trace_view_container = TraceViewContainer()

    @abstractmethod
    def assemble(self, data: Dict[str, Any]) -> None:
        """Assemble the timeline from the given input data.

        Args:
            data (Dict[str, Any]): Input data containing various timeline information.
        """

    def get_trace_view_container(self) -> TraceViewContainer:
        """Return the container holding the assembled trace view."""
        return self.trace_view_container
|
|
@@ -0,0 +1,84 @@
|
|
|
1
|
+
# Copyright 2024 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
|
|
2
|
+
# You may obtain a copy of the License at
|
|
3
|
+
#
|
|
4
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
5
|
+
#
|
|
6
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
7
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
8
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
9
|
+
# See the License for the specific language governing permissions and
|
|
10
|
+
# limitations under the License.
|
|
11
|
+
# ============================================================================
|
|
12
|
+
"""Container for managing trace view data and event pools."""
|
|
13
|
+
from typing import Dict, List, Optional
|
|
14
|
+
from collections import defaultdict
|
|
15
|
+
|
|
16
|
+
from mindspore.profiler.analysis.parser.timeline_event.timeline_event_pool import TimelineEventPool
|
|
17
|
+
from mindspore.profiler.analysis.parser.timeline_event.base_event import BaseEvent
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
class TraceViewContainer:
    """Container for trace view data and event pools.

    Responsibilities:
    1. Store and manage trace event pools keyed by process ID.
    2. Maintain the process name <-> pid mappings in both directions.
    3. Accumulate trace view events.
    4. Provide lookup access to the stored pools and events.
    """

    def __init__(self):
        # pid -> event pool for that process
        self.event_pools: Dict[int, TimelineEventPool] = {}
        # Bidirectional process-name / pid mappings.
        self.name_to_pid: Dict[str, int] = {}
        self.pid_to_name: Dict[int, str] = {}
        # Accumulated trace view events (chrome-trace dicts).
        self.trace_view: List[Dict] = []
        # Grouped op events; keys presumably index a device/stream — confirm at call sites.
        self._kernel_launch_op_dict: Dict[int, List[BaseEvent]] = defaultdict(list)
        self._hardware_op_event_dict: Dict[int, List[BaseEvent]] = defaultdict(list)

    @property
    def kernel_launch_op_event(self) -> Dict[int, List[BaseEvent]]:
        """Get all kernel launch events."""
        return self._kernel_launch_op_dict

    @kernel_launch_op_event.setter
    def kernel_launch_op_event(self, value):
        self._kernel_launch_op_dict = value

    @property
    def hardware_op_event(self) -> Dict[int, List[BaseEvent]]:
        """Get all hardware events."""
        return self._hardware_op_event_dict

    @hardware_op_event.setter
    def hardware_op_event(self, value):
        self._hardware_op_event_dict = value

    def add_event_pool(self, pool: TimelineEventPool) -> None:
        """Register an event pool, enforcing unique process names."""
        named = bool(pool.name)
        if named and pool.name in self.name_to_pid:
            raise ValueError(f"Process name '{pool.name}' already exists.")
        self.event_pools[pool.pid] = pool
        if named:
            self.name_to_pid[pool.name] = pool.pid
            self.pid_to_name[pool.pid] = pool.name

    def add_trace_events(self, events: List[Dict]) -> None:
        """Append the given trace view events."""
        self.trace_view.extend(events)

    def get_pool_by_pid(self, pid: int) -> Optional[TimelineEventPool]:
        """Look up an event pool by process ID; None when absent."""
        return self.event_pools.get(pid)

    def get_pool_by_name(self, name: str) -> Optional[TimelineEventPool]:
        """Look up an event pool by process name; None when absent."""
        pid = self.name_to_pid.get(name)
        if pid is None:
            return None
        return self.event_pools.get(pid)

    def get_trace_view(self) -> List[Dict]:
        """Return all accumulated trace view events."""
        return self.trace_view

    def get_all_pools(self) -> List[TimelineEventPool]:
        """Return every registered event pool."""
        return list(self.event_pools.values())
|
|
File without changes
|
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
# Copyright 2024 Huawei Technologies Co., Ltd
|
|
2
|
+
#
|
|
3
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4
|
+
# you may not use this file except in compliance with the License.
|
|
5
|
+
# You may obtain a copy of the License at
|
|
6
|
+
#
|
|
7
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
8
|
+
#
|
|
9
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
10
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12
|
+
# See the License for the specific language governing permissions and
|
|
13
|
+
# limitations under the License.
|
|
14
|
+
# ============================================================================
|
|
15
|
+
"""Base class for timeline event creators."""
|
|
16
|
+
from abc import ABC, abstractmethod
|
|
17
|
+
from typing import List, Dict, Any
|
|
18
|
+
|
|
19
|
+
from mindspore.profiler.analysis.parser.timeline_event.timeline_event_pool import TimelineEventPool
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
class BaseTimelineCreator(ABC):
    """Abstract base for building timeline event pools from raw input data."""

    def __init__(self):
        # pid -> pool of events created for that process
        self.event_pools: Dict[int, TimelineEventPool] = {}

    @abstractmethod
    def create(self, data: Any) -> None:
        """Create timeline event pools from input data."""

    def get_chrome_trace_data(self) -> List[Dict]:
        """Collect every pooled event in chrome trace format."""
        chrome_trace_data: List[Dict] = []
        for pool in self.event_pools.values():
            if not pool:
                continue
            chrome_trace_data.extend(pool.get_all_events_with_trace_format())
        return chrome_trace_data

    def get_event_pools(self) -> Dict[int, TimelineEventPool]:
        """Return the mapping of process ID to event pool."""
        return self.event_pools
|
|
@@ -0,0 +1,90 @@
|
|
|
1
|
+
# Copyright 2024 Huawei Technologies Co., Ltd
|
|
2
|
+
#
|
|
3
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4
|
+
# you may not use this file except in compliance with the License.
|
|
5
|
+
# You may obtain a copy of the License at
|
|
6
|
+
#
|
|
7
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
8
|
+
#
|
|
9
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
10
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12
|
+
# See the License for the specific language governing permissions and
|
|
13
|
+
# limitations under the License.
|
|
14
|
+
# ============================================================================
|
|
15
|
+
"""Timeline creator for CPU operations."""
|
|
16
|
+
from typing import List
|
|
17
|
+
|
|
18
|
+
from mindspore import log as logger
|
|
19
|
+
from mindspore.profiler.common.constant import EventConstant
|
|
20
|
+
from mindspore.profiler.common.constant import TimelineLayerName
|
|
21
|
+
from mindspore.profiler.analysis.parser.timeline_creator.base_timeline_creator import BaseTimelineCreator
|
|
22
|
+
from mindspore.profiler.analysis.parser.timeline_event.timeline_event_pool import TimelineEventPool
|
|
23
|
+
from mindspore.profiler.analysis.parser.timeline_event.cpu_op_event import CpuOpCompleteEvent
|
|
24
|
+
from mindspore.profiler.analysis.parser.timeline_event.cpu_op_event import CpuOpMetaEvent
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
class CpuOpTimelineCreator(BaseTimelineCreator):
    """Create timeline event pools for CPU operations."""

    def __init__(self):
        super().__init__()
        # Declared for scope-layer events; not populated in this class.
        self.scope_data: List[CpuOpCompleteEvent] = []

    def create(self, cpu_info_lines: List[str]) -> None:
        """Build the CPU-op event pool from raw info lines; no-op on empty input."""
        if not cpu_info_lines:
            return

        pool = TimelineEventPool(EventConstant.CPU_OP_PID)
        self.event_pools[EventConstant.CPU_OP_PID] = pool

        self._create_base_events(pool, cpu_info_lines)
        self._create_meta_event(pool)

    def _create_base_events(self, pool: TimelineEventPool, cpu_info_lines: List[str]) -> None:
        """Parse ';'-separated CPU info lines into complete events.

        Each line carries at least ``op_full_name;op_type;...;time_info`` where
        time_info is a space-separated list of ``start,dur,tid`` triples.
        Malformed lines/triples are logged and skipped.
        """
        for raw_line in cpu_info_lines:
            line = raw_line.strip()
            if not line:
                continue

            op_list = line.split(';')
            if len(op_list) < 3:
                logger.warning(f"Invalid CPU info format, expected at least 3 fields but got {len(op_list)}: {line}")
                continue

            op_full_name, op_type = op_list[0], op_list[1]

            # The last field holds one or more "start,dur,tid" triples.
            for time in op_list[-1].split():
                time_parts = time.split(',')
                if len(time_parts) != 3:
                    logger.warning(f"Invalid time info format, expected 3 fields but got {len(time_parts)}: {time}")
                    continue

                start_time, dur, tid = time_parts
                pool.add_event(CpuOpCompleteEvent({
                    'name': op_full_name,
                    'tid': int(tid),
                    'ts': str(start_time),
                    'dur': str(dur),
                    'args': {'type': op_type},
                }))

    @staticmethod
    def _create_meta_event(pool: TimelineEventPool) -> None:
        """Attach process- and thread-level metadata events to the pool."""
        for name, args in (
                (EventConstant.PROCESS_NAME, {"name": TimelineLayerName.CPU_OP.value}),
                (EventConstant.PROCESS_SORT, {"sort_index": EventConstant.CPU_OP_SORT_IDX}),
                (EventConstant.PROCESS_LABEL, {"labels": EventConstant.CPU_LABEL}),
        ):
            pool.add_event(CpuOpMetaEvent({"name": name, "tid": 0, "args": args}))

        for tid in pool.get_all_tids():
            pool.add_event(CpuOpMetaEvent({"name": EventConstant.THREAD_NAME, "tid": tid,
                                           "args": {"name": f"Thread {tid}"}}))
            pool.add_event(CpuOpMetaEvent({"name": EventConstant.THREAD_SORT, "tid": tid,
                                           "args": {"sort_index": tid}}))
|
|
@@ -0,0 +1,76 @@
|
|
|
1
|
+
# Copyright 2024 Huawei Technologies Co., Ltd
|
|
2
|
+
#
|
|
3
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4
|
+
# you may not use this file except in compliance with the License.
|
|
5
|
+
# You may obtain a copy of the License at
|
|
6
|
+
#
|
|
7
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
8
|
+
#
|
|
9
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
10
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12
|
+
# See the License for the specific language governing permissions and
|
|
13
|
+
# limitations under the License.
|
|
14
|
+
# ============================================================================
|
|
15
|
+
"""Timeline creator for framework operations."""
|
|
16
|
+
from typing import List, Dict
|
|
17
|
+
|
|
18
|
+
from mindspore.profiler.common.constant import EventConstant, FileConstant, TimelineLayerName
|
|
19
|
+
from mindspore.profiler.analysis.parser.timeline_event.timeline_event_pool import TimelineEventPool
|
|
20
|
+
from mindspore.profiler.analysis.parser.timeline_creator.base_timeline_creator import BaseTimelineCreator
|
|
21
|
+
from mindspore.profiler.analysis.parser.timeline_event.fwk_event import (
|
|
22
|
+
FwkCompleteEvent,
|
|
23
|
+
FwkInstantEvent,
|
|
24
|
+
OpRangeStructField,
|
|
25
|
+
FwkMetaEvent
|
|
26
|
+
)
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
class FwkTimelineCreator(BaseTimelineCreator):
    """Create timeline event pools for framework operations."""

    def create(self, fwk_tlv_data: List[Dict]) -> None:
        """Build the framework event pool from TLV records; no-op on empty input."""
        if not fwk_tlv_data:
            return

        pool = TimelineEventPool(EventConstant.MINDSPORE_PID)
        self.event_pools[EventConstant.MINDSPORE_PID] = pool

        self._create_base_events(pool, fwk_tlv_data)
        self._create_meta_event(pool)

    def _create_base_events(self, pool: TimelineEventPool, fwk_tlv_data: List[Dict]) -> None:
        """Convert TLV records into complete/instant events and register flow links."""
        for data in fwk_tlv_data:
            fixed = data[FileConstant.FIX_SIZE_DATA]
            start_ns = fixed[OpRangeStructField.START_NS.value]
            end_ns = fixed[OpRangeStructField.END_NS.value]

            if start_ns == 0:  # Filter abnormal data
                continue

            # Zero-duration records become instant events; the rest are complete events.
            event = FwkInstantEvent(data) if start_ns == end_ns else FwkCompleteEvent(data)

            if event.name == EventConstant.FLOW_OP:
                # Flow-op records only mark the start side of a flow link.
                pool.add_start_event(str(event.id), event)
                continue
            if event.id != EventConstant.INVALID_FLOW_ID:
                pool.add_end_event(str(event.id), event)
            pool.add_event(event)

    @staticmethod
    def _create_meta_event(pool: TimelineEventPool) -> None:
        """Attach process- and thread-level metadata events to the pool."""
        for name, args in (
                (EventConstant.PROCESS_NAME, {"name": TimelineLayerName.MINDSPORE.value}),
                (EventConstant.PROCESS_SORT, {"sort_index": EventConstant.MINDSPORE_SORT_IDX}),
                (EventConstant.PROCESS_LABEL, {"labels": EventConstant.CPU_LABEL}),
        ):
            pool.add_event(FwkMetaEvent({"name": name, "tid": 0, "args": args}))

        for tid in pool.get_all_tids():
            pool.add_event(FwkMetaEvent({"name": EventConstant.THREAD_NAME, "tid": tid,
                                         "args": {"name": f"Thread {tid}"}}))
            pool.add_event(FwkMetaEvent({"name": EventConstant.THREAD_SORT, "tid": tid,
                                         "args": {"sort_index": tid}}))
|