mindspore 2.7.0-cp310-cp310-win_amd64.whl → 2.7.0rc1-cp310-cp310-win_amd64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of mindspore might be problematic.
- mindspore/.commit_id +1 -1
- mindspore/__init__.py +1 -1
- mindspore/_c_dataengine.cp310-win_amd64.pyd +0 -0
- mindspore/_c_expression.cp310-win_amd64.pyd +0 -0
- mindspore/_c_mindrecord.cp310-win_amd64.pyd +0 -0
- mindspore/_checkparam.py +2 -2
- mindspore/_extends/builtin_operations.py +3 -3
- mindspore/_extends/parallel_compile/akg_compiler/gen_custom_op_files.py +1 -1
- mindspore/_extends/parse/__init__.py +3 -3
- mindspore/_extends/parse/deprecated/deprecated_tensor_method.py +1 -0
- mindspore/_extends/parse/parser.py +22 -28
- mindspore/_extends/parse/standard_method.py +1 -15
- mindspore/_extends/pijit/pijit_func_white_list.py +5 -2
- mindspore/_extends/remote/kernel_build_server_ascend.py +75 -0
- mindspore/amp.py +18 -0
- mindspore/avcodec-59.dll +0 -0
- mindspore/avdevice-59.dll +0 -0
- mindspore/avfilter-8.dll +0 -0
- mindspore/avformat-59.dll +0 -0
- mindspore/avutil-57.dll +0 -0
- mindspore/common/__init__.py +12 -18
- mindspore/common/_tensor_cpp_method.py +1 -1
- mindspore/common/_tensor_docs.py +38 -102
- mindspore/common/_utils.py +1 -9
- mindspore/common/api.py +106 -155
- mindspore/common/{dynamic_shape/auto_dynamic_shape.py → auto_dynamic_shape.py} +23 -17
- mindspore/common/dtype.py +57 -98
- mindspore/common/dump.py +1 -1
- mindspore/common/file_system.py +9 -59
- mindspore/common/hook_handle.py +3 -22
- mindspore/common/np_dtype.py +3 -3
- mindspore/common/parameter.py +20 -4
- mindspore/common/recompute.py +4 -2
- mindspore/common/tensor.py +52 -38
- mindspore/communication/_hccl_management.py +297 -0
- mindspore/context.py +21 -15
- mindspore/dataset/__init__.py +1 -1
- mindspore/dataset/audio/transforms.py +1 -1
- mindspore/dataset/core/config.py +1 -35
- mindspore/dataset/engine/datasets.py +315 -330
- mindspore/dataset/engine/datasets_user_defined.py +22 -38
- mindspore/dataset/transforms/c_transforms.py +2 -2
- mindspore/dataset/transforms/transforms.py +3 -3
- mindspore/dataset/vision/__init__.py +1 -1
- mindspore/dataset/vision/py_transforms.py +8 -8
- mindspore/dataset/vision/transforms.py +5 -17
- mindspore/dataset/vision/utils.py +21 -632
- mindspore/device_context/ascend/op_tuning.py +1 -35
- mindspore/dnnl.dll +0 -0
- mindspore/experimental/llm_boost/ascend_native/llama_boost_ascend_native.py +0 -3
- mindspore/include/api/cell.h +4 -28
- mindspore/include/api/cfg.h +7 -24
- mindspore/include/api/context.h +0 -1
- mindspore/include/api/delegate.h +2 -0
- mindspore/include/api/dual_abi_helper.h +19 -100
- mindspore/include/api/graph.h +1 -14
- mindspore/include/api/kernel.h +3 -16
- mindspore/include/api/kernel_api.h +1 -9
- mindspore/include/api/metrics/accuracy.h +0 -9
- mindspore/include/api/model.h +1 -5
- mindspore/include/api/model_group.h +0 -4
- mindspore/include/api/model_parallel_runner.h +0 -2
- mindspore/include/api/status.h +10 -48
- mindspore/include/api/types.h +1 -6
- mindspore/include/dataset/constants.h +0 -9
- mindspore/jpeg62.dll +0 -0
- mindspore/mindrecord/tools/cifar10.py +2 -3
- mindspore/mindrecord/tools/cifar10_to_mr.py +5 -5
- mindspore/mindspore_backend_common.dll +0 -0
- mindspore/mindspore_backend_manager.dll +0 -0
- mindspore/mindspore_common.dll +0 -0
- mindspore/mindspore_core.dll +0 -0
- mindspore/mindspore_cpu_res_manager.dll +0 -0
- mindspore/mindspore_dump.dll +0 -0
- mindspore/mindspore_frontend.dll +0 -0
- mindspore/mindspore_glog.dll +0 -0
- mindspore/mindspore_memory_pool.dll +0 -0
- mindspore/mindspore_ms_backend.dll +0 -0
- mindspore/mindspore_ops.dll +0 -0
- mindspore/mindspore_ops_host.dll +0 -0
- mindspore/mindspore_ops_kernel_common.dll +0 -0
- mindspore/mindspore_profiler.dll +0 -0
- mindspore/mindspore_pyboost.dll +0 -0
- mindspore/mindspore_pynative.dll +0 -0
- mindspore/mindspore_res_manager.dll +0 -0
- mindspore/mindspore_runtime_pipeline.dll +0 -0
- mindspore/mint/distributed/__init__.py +0 -4
- mindspore/mint/distributed/distributed.py +14 -217
- mindspore/mint/nn/layer/_functions.py +2 -1
- mindspore/mint/nn/layer/conv.py +6 -6
- mindspore/mint/nn/layer/normalization.py +3 -3
- mindspore/nn/cell.py +174 -216
- mindspore/nn/layer/activation.py +2 -4
- mindspore/nn/layer/basic.py +13 -7
- mindspore/nn/layer/image.py +1 -1
- mindspore/nn/optim/adam.py +3 -1
- mindspore/nn/optim/lamb.py +3 -1
- mindspore/nn/optim/tft_wrapper.py +3 -2
- mindspore/nn/probability/distribution/_utils/utils.py +2 -2
- mindspore/nn/wrap/cell_wrapper.py +5 -39
- mindspore/nn/wrap/grad_reducer.py +15 -0
- mindspore/numpy/array_creations.py +2 -2
- mindspore/numpy/utils_const.py +1 -1
- mindspore/opencv_core452.dll +0 -0
- mindspore/opencv_imgcodecs452.dll +0 -0
- mindspore/opencv_imgproc452.dll +0 -0
- mindspore/ops/_grad_experimental/grad_inner_ops.py +9 -0
- mindspore/ops/_op_impl/cpu/__init__.py +0 -1
- mindspore/ops/auto_generate/cpp_create_prim_instance_helper.py +2 -12
- mindspore/ops/auto_generate/gen_extend_func.py +4 -4
- mindspore/ops/auto_generate/gen_ops_def.py +16 -290
- mindspore/ops/auto_generate/gen_ops_prim.py +76 -563
- mindspore/ops/composite/base.py +1 -1
- mindspore/ops/composite/multitype_ops/_constexpr_utils.py +1 -1
- mindspore/ops/function/__init__.py +0 -1
- mindspore/ops/function/array_func.py +6 -10
- mindspore/ops/function/debug_func.py +2 -4
- mindspore/ops/function/grad/grad_func.py +12 -4
- mindspore/ops/function/math_func.py +32 -44
- mindspore/ops/function/nn_func.py +20 -18
- mindspore/ops/functional.py +1 -2
- mindspore/ops/functional_overload.py +12 -23
- mindspore/ops/operations/_inner_ops.py +12 -11
- mindspore/ops/operations/array_ops.py +50 -4
- mindspore/ops/operations/comm_ops.py +15 -1
- mindspore/ops/operations/custom_ops.py +4 -10
- mindspore/ops/operations/debug_ops.py +6 -6
- mindspore/ops/operations/manually_defined/ops_def.py +12 -12
- mindspore/ops/operations/math_ops.py +5 -5
- mindspore/ops/operations/nn_ops.py +1 -1
- mindspore/ops/primitive.py +10 -3
- mindspore/ops/tensor_method.py +7 -16
- mindspore/ops_generate/pyboost/gen_pyboost_func.py +16 -0
- mindspore/parallel/_auto_parallel_context.py +15 -5
- mindspore/parallel/_parallel_serialization.py +2 -3
- mindspore/parallel/_ps_context.py +2 -2
- mindspore/parallel/_transformer/transformer.py +4 -4
- mindspore/parallel/_utils.py +11 -5
- mindspore/parallel/auto_parallel.py +9 -23
- mindspore/parallel/checkpoint_transform.py +0 -2
- mindspore/parallel/cluster/process_entity/_api.py +1 -4
- mindspore/parallel/cluster/run.py +3 -5
- mindspore/parallel/function/reshard_func.py +5 -6
- mindspore/parallel/nn/parallel_cell_wrapper.py +3 -40
- mindspore/parallel/nn/parallel_grad_reducer.py +8 -0
- mindspore/parallel/shard.py +21 -7
- mindspore/parallel/transform_safetensors.py +4 -10
- mindspore/profiler/analysis/viewer/ascend_kernel_details_viewer.py +9 -10
- mindspore/profiler/analysis/viewer/ascend_op_memory_viewer.py +1 -1
- mindspore/profiler/common/msprof_cmd_tool.py +2 -2
- mindspore/profiler/common/path_manager.py +0 -9
- mindspore/profiler/common/profiler_context.py +2 -25
- mindspore/profiler/common/profiler_meta_data.py +0 -1
- mindspore/profiler/common/profiler_op_analyse.py +6 -10
- mindspore/{ops/_op_impl/cpu/joinedstr_op.py → profiler/common/validator/__init__.py} +1 -15
- mindspore/profiler/common/validator/validate_path.py +84 -0
- mindspore/profiler/dynamic_profiler.py +46 -91
- mindspore/profiler/envprofiler.py +5 -30
- mindspore/profiler/experimental_config.py +1 -16
- mindspore/profiler/platform/cpu_profiler.py +4 -10
- mindspore/profiler/platform/npu_profiler.py +1 -1
- mindspore/profiler/profiler.py +145 -193
- mindspore/profiler/profiler_action_controller.py +1 -1
- mindspore/profiler/profiler_interface.py +2 -2
- mindspore/rewrite/symbol_tree/symbol_tree.py +1 -1
- mindspore/runtime/__init__.py +4 -6
- mindspore/runtime/executor.py +0 -27
- mindspore/runtime/memory.py +0 -1
- mindspore/runtime/thread_bind_core.py +1 -1
- mindspore/swresample-4.dll +0 -0
- mindspore/swscale-6.dll +0 -0
- mindspore/tinyxml2.dll +0 -0
- mindspore/train/_utils.py +3 -3
- mindspore/train/amp.py +3 -0
- mindspore/train/callback/_callback.py +1 -2
- mindspore/train/callback/_checkpoint.py +8 -1
- mindspore/train/callback/_flops_collector.py +6 -10
- mindspore/train/callback/_train_fault_tolerance.py +7 -3
- mindspore/train/data_sink.py +4 -4
- mindspore/train/dataset_helper.py +5 -5
- mindspore/train/model.py +20 -4
- mindspore/train/serialization.py +15 -35
- mindspore/train/train_thor/model_thor.py +2 -2
- mindspore/turbojpeg.dll +0 -0
- mindspore/utils/hooks.py +81 -0
- mindspore/utils/utils.py +8 -8
- mindspore/version.py +1 -1
- {mindspore-2.7.0.dist-info → mindspore-2.7.0rc1.dist-info}/METADATA +1 -1
- {mindspore-2.7.0.dist-info → mindspore-2.7.0rc1.dist-info}/RECORD +193 -192
- mindspore/_extends/parallel_compile/akg_compiler/custom.py +0 -1109
- mindspore/common/dynamic_shape/__init__.py +0 -0
- mindspore/common/dynamic_shape/enable_dynamic.py +0 -197
- /mindspore/common/{dynamic_shape/_auto_dynamic.py → _auto_dynamic.py} +0 -0
- {mindspore-2.7.0.dist-info → mindspore-2.7.0rc1.dist-info}/WHEEL +0 -0
- {mindspore-2.7.0.dist-info → mindspore-2.7.0rc1.dist-info}/entry_points.txt +0 -0
- {mindspore-2.7.0.dist-info → mindspore-2.7.0rc1.dist-info}/top_level.txt +0 -0
mindspore/common/dynamic_shape/enable_dynamic.py +0 -197

@@ -1,197 +0,0 @@
-# This is the Python adaptation and derivative work of Myia (https://github.com/mila-iqia/myia/).
-#
-# Copyright 2025 Huawei Technologies Co., Ltd
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============================================================================
-"""Define enable_dynamic decorator."""
-import types
-import inspect
-from mindspore import log as logger
-from mindspore.common.tensor import Tensor
-from mindspore.common._utils import get_func, is_dim_unknown
-from mindspore.common.dynamic_shape.auto_dynamic_shape import SHAPE_DIM_ANY
-
-
-ENABLE_DYNAMIC = "__enable_dynamic__"
-
-
-def _check_element_valid(item, shape, name):
-    """Check elements in shape."""
-    if item is not SHAPE_DIM_ANY and (isinstance(item, int) and item <= 0):
-        raise TypeError(f"The argument '{name}' has invalid shape '{shape}', only supports None " \
-                        f"or a tuple/list of positive integers and None.")
-    return True
-
-
-def _check_arg_shape_valid(arg, name):
-    """Check if the shape of arg is valid"""
-    #if the shape of arg is None
-    if isinstance(arg, Tensor) and is_dim_unknown(arg.shape):
-        return True
-    if isinstance(arg, Tensor) and \
-            SHAPE_DIM_ANY in arg.shape and \
-            all(_check_element_valid(item, arg.shape, name) for item in arg.shape):
-        return True
-    if isinstance(arg, (tuple, list)) and any(_check_arg_shape_valid(item, name) for item in arg):
-        return True
-    return False
-
-
-def _check_arg_type_valid(arg, name):
-    """Check if the type of arg is valid."""
-    if isinstance(arg, Tensor):
-        return
-    if isinstance(arg, (tuple, list)):
-        for item in arg:
-            _check_arg_type_valid(item, name)
-    else:
-        raise TypeError(f"The decorator enable_dynamic only supports Tensor " \
-                        f"or a tuple/list of Tensor, but the argument : {name} is type of:{type(arg)}.")
-
-
-def _check_input_valid(arg):
-    """Check if real argument is valid."""
-    if isinstance(arg, Tensor):
-        if not all(isinstance(item, int) and item > 0 for item in arg.shape):
-            raise ValueError(f"When using decorator enable_dynamic, the corresponding shape of inputs should be " \
-                             f"a tuple/list of positive integers")
-    elif isinstance(arg, (tuple, list)):
-        for item in arg:
-            _check_input_valid(item)
-    else:
-        raise TypeError(f"When using decorator enable_dynamic, the corresponding inputs only supports Tensor " \
-                        f"or a tuple/list of Tensor.")
-
-
-def _check_arg_type_shape(arg, dyn_arg, name):
-    """Check the type, shape and dtype of real argument."""
-    if isinstance(arg, Tensor) and isinstance(dyn_arg, Tensor):
-        if arg.dtype != dyn_arg.dtype:
-            raise TypeError(f"When using decorator enable_dynamic, input tensor dtype = {arg.dtype}, " \
-                            f"dynamic tensor dtype = {dyn_arg.dtype}, tensor dtypes are not the same.")
-        if is_dim_unknown(dyn_arg.shape):
-            return
-        if len(arg.shape) != len(dyn_arg.shape) or \
-                any(y is not SHAPE_DIM_ANY and x != y for x, y in zip(arg.shape, dyn_arg.shape)):
-            raise ValueError(f"When using decorator enable_dynamic, input tensor shape = {arg.shape}, " \
-                             f"dynamic tensor shape = {dyn_arg.shape}, tensor shapes are not the same.")
-    elif isinstance(arg, (tuple, list)) and isinstance(dyn_arg, (tuple, list)):
-        if len(arg) != len(dyn_arg):
-            raise ValueError("Input sequences must have the same structure and length.")
-        for x, y in zip(arg, dyn_arg):
-            _check_arg_type_shape(x, y, name)
-    else:
-        raise TypeError(f"When using decorator enable_dynamic, the type between argument '{name}' " \
-                        f"and corresponding input are not the same.")
-
-
-def generate_dynamic_sequence_args(args_list, dyn_args_list):
-    """Generate dynamic shapes for input sequence"""
-    if isinstance(args_list, Tensor):
-        return dyn_args_list if args_list.shape != dyn_args_list.shape else args_list
-    result = []
-    for x, y in zip(args_list, dyn_args_list):
-        result.append(generate_dynamic_sequence_args(x, y))
-    return type(args_list)(result)
-
-
-def generate_dynamic_tensor_args(args_list, dynamic_shapes):
-    """Generate compile args with dynamic_shapes"""
-    new_compile_args = list(args_list)
-    for index, arg in enumerate(args_list):
-        if isinstance(arg, (tuple, list)) and not hasattr(arg, "__ms_mutable__"):
-            raise ValueError(f"When using decorator enable_dynamic, the corresponding attribute of input should be " \
-                             f"mutable(tuple/list)")
-        if index not in dynamic_shapes:
-            continue
-        _check_input_valid(arg)
-        name, dyn_arg = dynamic_shapes[index]
-        _check_arg_type_shape(arg, dyn_arg, name)
-        new_compile_args[index] = generate_dynamic_sequence_args(arg, dyn_arg)
-    logger.debug(f"args_list: {args_list}, dynamic_shapes: {dynamic_shapes}, " \
-                 f"new_compile_args: {new_compile_args}")
-    return new_compile_args
-
-
-def enable_dynamic(**kwargs):
-    """
-    Use to specify whether the shape of the parameter is dynamic shape or dynamic rank.
-
-    Note:
-        - It needs to be used in conjunction with the JIT interface. Without using the JIT decorator,
-          the dynamic shape and dynamic rank functions will not be enabled.
-        - In the scenario where both set_context(mode=GRAPH_MODE) and nn.Cell are set simultaneously,
-          use enabled_dynamic to report an error.
-
-    Args:
-        kwargs (dict): The input types are Tensor, tuple[Tensor] and list[Tensor]. If one or
-            more dimensions in the shape of the parameter need to be specified as dynamic shapes,
-            the corresponding dimensions in the shape can be set to None. If the shape that needs
-            to generate specified parameters is dynamic rank, the shape can be set to None.
-
-    Returns:
-        Function, return a function that specifies the dynamic shape information of the parameter.
-
-    Supported Platforms:
-        ``Ascend`` ``GPU`` ``CPU``
-
-    Examples:
-        >>> import numpy as np
-        >>> import mindspore as ms
-        >>> from mindspore import Tensor
-        >>> from mindspore import enable_dynamic
-        >>> from mindspore import jit
-        ...
-        >>> x = Tensor(np.random.randn(2, 3), ms.float32)
-        >>> y = Tensor(np.random.randn(2, 3), ms.float32)
-        ...
-        >>> # Specify parameter y as dynamic shape
-        >>> @enable_dynamic(y=Tensor(shape=None, dtype=ms.float32))
-        >>> @jit
-        >>> def func(x, y):
-        ...     return x + 1, y + 1
-        ...
-        >>> out = func(x, y)
-    """
-    # Check inputs at first.
-    if not kwargs:
-        raise ValueError(f"When using decorator enable_dynamic, the input cannot be empty!")
-    for name, arg in kwargs.items():
-        _check_arg_type_valid(arg, name)
-        if not _check_arg_shape_valid(arg, name):
-            raise TypeError(f"When using decorator enable_dynamic, the shape of argument '{name}' " \
-                            f"at least have one None.")
-
-    def decorator(func):
-        if not isinstance(func, (types.FunctionType, types.MethodType)):
-            raise ValueError(f"Decorator enable_dynamic can only be used for function or method " \
-                             f"decrocated by ms.jit, but got {func}.")
-        signature = inspect.signature(func)
-        sigs_name = [sig_name for sig_name in signature.parameters if sig_name != "self"]
-        if len(kwargs) > len(sigs_name):
-            raise ValueError(f"When using decorator enable_dynamic, the number of arguments {len(kwargs)} " \
-                             f"exceeds the number of function arguments {len(sigs_name)}.")
-        # Generate dynamic args.
-        dynamic_args = dict()
-        for key, value in kwargs.items():
-            index = sigs_name.index(key)
-            if index in dynamic_args:
-                raise ValueError(f"keyword argument repeated: {key}")
-            dynamic_args[index] = (key, value)
-        # Set dynamic_tensor_shape to func.
-        inner_func = inspect.unwrap(func, stop=lambda f: not hasattr(f, '__wrapped__'))
-        setattr(get_func(inner_func), ENABLE_DYNAMIC, dynamic_args)
-        logger.info(f"Set enable dynamic: {dynamic_args} to {inner_func}")
-        return func
-    return decorator
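For context, the deleted module implemented the enable_dynamic decorator, which records per-argument dynamic-shape/dynamic-rank specifications on a jit-compiled function by attaching them to the function under the __enable_dynamic__ attribute, as the removed code above shows. Below is a minimal usage sketch reconstructed from the module's own docstring example; it only applies to the 2.7.0 wheel, since the 2.7.0rc1 wheel no longer ships this module, and the behaviour of the jit compile path that consumes the attribute is assumed rather than shown in this diff.

    import numpy as np
    import mindspore as ms
    from mindspore import Tensor, jit, enable_dynamic  # enable_dynamic is exported only by the 2.7.0 wheel

    x = Tensor(np.random.randn(2, 3), ms.float32)
    y = Tensor(np.random.randn(2, 3), ms.float32)

    # Tensor(shape=None, ...) marks argument y as dynamic rank: its shape
    # (and even its number of dimensions) may change between calls.
    @enable_dynamic(y=Tensor(shape=None, dtype=ms.float32))
    @jit
    def func(x, y):
        return x + 1, y + 1

    out = func(x, y)  # compiled with a dynamic placeholder for y instead of its concrete shape

Per the docstring, passing a shape tuple with None entries (rather than shape=None) would mark only those dimensions as dynamic while keeping the rest fixed.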