mindspore 2.3.0-cp310-cp310-win_amd64.whl → 2.4.0-cp310-cp310-win_amd64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of mindspore might be problematic.
- mindspore/.commit_id +1 -1
- mindspore/Microsoft.VisualStudio.Telemetry.dll +0 -0
- mindspore/Newtonsoft.Json.dll +0 -0
- mindspore/__init__.py +3 -1
- mindspore/_c_dataengine.cp310-win_amd64.pyd +0 -0
- mindspore/_c_expression.cp310-win_amd64.pyd +0 -0
- mindspore/_c_mindrecord.cp310-win_amd64.pyd +0 -0
- mindspore/_checkparam.py +50 -9
- mindspore/_extends/parse/compile_config.py +41 -0
- mindspore/_extends/parse/parser.py +9 -7
- mindspore/_extends/parse/standard_method.py +52 -14
- mindspore/_extends/pijit/pijit_func_white_list.py +350 -24
- mindspore/amp.py +24 -10
- mindspore/atlprov.dll +0 -0
- mindspore/avcodec-59.dll +0 -0
- mindspore/avdevice-59.dll +0 -0
- mindspore/avfilter-8.dll +0 -0
- mindspore/avformat-59.dll +0 -0
- mindspore/avutil-57.dll +0 -0
- mindspore/c1.dll +0 -0
- mindspore/c1xx.dll +0 -0
- mindspore/c2.dll +0 -0
- mindspore/common/__init__.py +6 -4
- mindspore/common/_pijit_context.py +190 -0
- mindspore/common/_register_for_tensor.py +2 -1
- mindspore/common/_tensor_overload.py +139 -0
- mindspore/common/api.py +102 -87
- mindspore/common/dump.py +5 -6
- mindspore/common/generator.py +1 -7
- mindspore/common/hook_handle.py +14 -26
- mindspore/common/mindir_util.py +2 -2
- mindspore/common/parameter.py +46 -13
- mindspore/common/recompute.py +39 -9
- mindspore/common/sparse_tensor.py +7 -3
- mindspore/common/tensor.py +209 -29
- mindspore/communication/__init__.py +1 -1
- mindspore/communication/_comm_helper.py +38 -3
- mindspore/communication/comm_func.py +310 -55
- mindspore/communication/management.py +14 -14
- mindspore/context.py +123 -22
- mindspore/dataset/__init__.py +1 -1
- mindspore/dataset/audio/__init__.py +1 -1
- mindspore/dataset/core/config.py +7 -0
- mindspore/dataset/core/validator_helpers.py +7 -0
- mindspore/dataset/engine/cache_client.py +1 -1
- mindspore/dataset/engine/datasets.py +72 -44
- mindspore/dataset/engine/datasets_audio.py +7 -7
- mindspore/dataset/engine/datasets_standard_format.py +53 -3
- mindspore/dataset/engine/datasets_text.py +20 -20
- mindspore/dataset/engine/datasets_user_defined.py +174 -104
- mindspore/dataset/engine/datasets_vision.py +33 -33
- mindspore/dataset/engine/iterators.py +29 -0
- mindspore/dataset/engine/obs/util.py +7 -0
- mindspore/dataset/engine/queue.py +114 -60
- mindspore/dataset/engine/serializer_deserializer.py +2 -2
- mindspore/dataset/engine/validators.py +34 -14
- mindspore/dataset/text/__init__.py +1 -4
- mindspore/dataset/transforms/__init__.py +0 -3
- mindspore/dataset/utils/line_reader.py +2 -0
- mindspore/dataset/vision/__init__.py +1 -4
- mindspore/dataset/vision/utils.py +1 -1
- mindspore/dataset/vision/validators.py +2 -1
- mindspore/dnnl.dll +0 -0
- mindspore/dpcmi.dll +0 -0
- mindspore/{nn/extend → experimental/es}/__init__.py +4 -11
- mindspore/experimental/es/embedding_service.py +883 -0
- mindspore/{nn/layer → experimental/es}/embedding_service_layer.py +218 -30
- mindspore/experimental/llm_boost/__init__.py +21 -0
- mindspore/{nn/extend/layer → experimental/llm_boost/atb}/__init__.py +4 -8
- mindspore/experimental/llm_boost/atb/boost_base.py +211 -0
- mindspore/experimental/llm_boost/atb/llama_boost.py +115 -0
- mindspore/experimental/llm_boost/atb/qwen_boost.py +101 -0
- mindspore/experimental/llm_boost/register.py +129 -0
- mindspore/experimental/llm_boost/utils.py +31 -0
- mindspore/experimental/optim/adamw.py +85 -0
- mindspore/experimental/optim/optimizer.py +3 -0
- mindspore/hal/__init__.py +3 -3
- mindspore/hal/contiguous_tensors_handle.py +175 -0
- mindspore/hal/stream.py +18 -0
- mindspore/include/api/model_group.h +13 -1
- mindspore/include/api/types.h +10 -10
- mindspore/include/dataset/config.h +2 -2
- mindspore/include/dataset/constants.h +2 -2
- mindspore/include/dataset/execute.h +2 -2
- mindspore/include/dataset/vision.h +4 -0
- mindspore/jpeg62.dll +0 -0
- mindspore/log.py +1 -1
- mindspore/mindrecord/filewriter.py +68 -51
- mindspore/mindspore_backend.dll +0 -0
- mindspore/mindspore_common.dll +0 -0
- mindspore/mindspore_core.dll +0 -0
- mindspore/mindspore_glog.dll +0 -0
- mindspore/mindspore_np_dtype.dll +0 -0
- mindspore/mindspore_ops.dll +0 -0
- mindspore/mint/__init__.py +495 -46
- mindspore/mint/distributed/__init__.py +31 -0
- mindspore/mint/distributed/distributed.py +254 -0
- mindspore/mint/nn/__init__.py +266 -21
- mindspore/mint/nn/functional.py +125 -19
- mindspore/mint/nn/layer/__init__.py +39 -0
- mindspore/mint/nn/layer/activation.py +133 -0
- mindspore/mint/nn/layer/normalization.py +477 -0
- mindspore/mint/nn/layer/pooling.py +110 -0
- mindspore/mint/optim/adamw.py +28 -7
- mindspore/mint/special/__init__.py +63 -0
- mindspore/msobj140.dll +0 -0
- mindspore/mspdb140.dll +0 -0
- mindspore/mspdbcore.dll +0 -0
- mindspore/mspdbst.dll +0 -0
- mindspore/mspft140.dll +0 -0
- mindspore/msvcdis140.dll +0 -0
- mindspore/msvcp140_1.dll +0 -0
- mindspore/msvcp140_2.dll +0 -0
- mindspore/msvcp140_atomic_wait.dll +0 -0
- mindspore/msvcp140_codecvt_ids.dll +0 -0
- mindspore/multiprocessing/__init__.py +2 -1
- mindspore/nn/__init__.py +0 -1
- mindspore/nn/cell.py +275 -93
- mindspore/nn/layer/activation.py +211 -44
- mindspore/nn/layer/basic.py +113 -3
- mindspore/nn/layer/embedding.py +120 -2
- mindspore/nn/layer/normalization.py +101 -5
- mindspore/nn/layer/padding.py +34 -48
- mindspore/nn/layer/pooling.py +161 -7
- mindspore/nn/layer/transformer.py +3 -3
- mindspore/nn/loss/__init__.py +2 -2
- mindspore/nn/loss/loss.py +84 -6
- mindspore/nn/optim/__init__.py +2 -1
- mindspore/nn/optim/adadelta.py +1 -1
- mindspore/nn/optim/adam.py +1 -1
- mindspore/nn/optim/lamb.py +1 -1
- mindspore/nn/optim/tft_wrapper.py +127 -0
- mindspore/nn/wrap/cell_wrapper.py +12 -23
- mindspore/nn/wrap/grad_reducer.py +5 -5
- mindspore/nn/wrap/loss_scale.py +17 -3
- mindspore/numpy/__init__.py +1 -1
- mindspore/numpy/array_creations.py +65 -68
- mindspore/numpy/array_ops.py +64 -60
- mindspore/numpy/fft.py +610 -75
- mindspore/numpy/logic_ops.py +11 -10
- mindspore/numpy/math_ops.py +85 -84
- mindspore/numpy/utils_const.py +4 -4
- mindspore/opencv_core452.dll +0 -0
- mindspore/opencv_imgcodecs452.dll +0 -0
- mindspore/opencv_imgproc452.dll +0 -0
- mindspore/ops/__init__.py +6 -4
- mindspore/ops/_grad_experimental/grad_comm_ops.py +47 -3
- mindspore/ops/_grad_experimental/grad_math_ops.py +0 -22
- mindspore/ops/_vmap/vmap_array_ops.py +2 -4
- mindspore/ops/_vmap/vmap_math_ops.py +17 -1
- mindspore/ops/_vmap/vmap_nn_ops.py +43 -2
- mindspore/ops/auto_generate/cpp_create_prim_instance_helper.py +85 -7
- mindspore/ops/auto_generate/gen_arg_dtype_cast.py +2 -0
- mindspore/ops/auto_generate/gen_extend_func.py +734 -13
- mindspore/ops/auto_generate/gen_ops_def.py +2420 -381
- mindspore/ops/auto_generate/gen_ops_prim.py +5196 -1659
- mindspore/ops/auto_generate/pyboost_inner_prim.py +176 -56
- mindspore/ops/composite/base.py +85 -48
- mindspore/ops/composite/multitype_ops/_compile_utils.py +1 -0
- mindspore/ops/composite/multitype_ops/not_in_impl.py +2 -2
- mindspore/ops/function/__init__.py +22 -0
- mindspore/ops/function/array_func.py +490 -153
- mindspore/ops/function/debug_func.py +113 -1
- mindspore/ops/function/fft_func.py +15 -2
- mindspore/ops/function/grad/grad_func.py +3 -2
- mindspore/ops/function/math_func.py +558 -207
- mindspore/ops/function/nn_func.py +817 -383
- mindspore/ops/function/other_func.py +3 -2
- mindspore/ops/function/random_func.py +184 -8
- mindspore/ops/function/reshard_func.py +13 -11
- mindspore/ops/function/sparse_unary_func.py +1 -1
- mindspore/ops/function/vmap_func.py +3 -2
- mindspore/ops/functional.py +24 -14
- mindspore/ops/op_info_register.py +3 -3
- mindspore/ops/operations/__init__.py +6 -1
- mindspore/ops/operations/_grad_ops.py +2 -76
- mindspore/ops/operations/_infer_ops.py +1 -1
- mindspore/ops/operations/_inner_ops.py +71 -94
- mindspore/ops/operations/array_ops.py +12 -146
- mindspore/ops/operations/comm_ops.py +42 -53
- mindspore/ops/operations/custom_ops.py +83 -19
- mindspore/ops/operations/debug_ops.py +42 -10
- mindspore/ops/operations/manually_defined/_inner.py +12 -0
- mindspore/ops/operations/manually_defined/ops_def.py +265 -10
- mindspore/ops/operations/math_ops.py +12 -223
- mindspore/ops/operations/nn_ops.py +20 -114
- mindspore/ops/operations/other_ops.py +7 -4
- mindspore/ops/operations/random_ops.py +46 -1
- mindspore/ops/primitive.py +18 -6
- mindspore/ops_generate/arg_dtype_cast.py +2 -0
- mindspore/ops_generate/gen_aclnn_implement.py +11 -11
- mindspore/ops_generate/gen_constants.py +36 -0
- mindspore/ops_generate/gen_ops.py +67 -52
- mindspore/ops_generate/gen_ops_inner_prim.py +1 -1
- mindspore/ops_generate/gen_pyboost_func.py +131 -47
- mindspore/ops_generate/op_proto.py +10 -3
- mindspore/ops_generate/pyboost_utils.py +14 -1
- mindspore/ops_generate/template.py +43 -21
- mindspore/parallel/__init__.py +3 -1
- mindspore/parallel/_auto_parallel_context.py +28 -8
- mindspore/parallel/_cell_wrapper.py +83 -0
- mindspore/parallel/_parallel_serialization.py +47 -19
- mindspore/parallel/_tensor.py +81 -11
- mindspore/parallel/_utils.py +13 -1
- mindspore/parallel/algo_parameter_config.py +5 -5
- mindspore/parallel/checkpoint_transform.py +46 -39
- mindspore/parallel/cluster/process_entity/__init__.py +1 -1
- mindspore/parallel/cluster/process_entity/_api.py +31 -23
- mindspore/parallel/cluster/process_entity/_utils.py +2 -27
- mindspore/parallel/parameter_broadcast.py +3 -4
- mindspore/parallel/shard.py +162 -31
- mindspore/parallel/transform_safetensors.py +993 -0
- mindspore/pgodb140.dll +0 -0
- mindspore/pgort140.dll +0 -0
- mindspore/profiler/__init__.py +2 -1
- mindspore/profiler/common/constant.py +29 -0
- mindspore/profiler/common/registry.py +47 -0
- mindspore/profiler/common/util.py +28 -0
- mindspore/profiler/dynamic_profiler.py +694 -0
- mindspore/profiler/envprofiling.py +17 -19
- mindspore/profiler/parser/ascend_analysis/constant.py +18 -0
- mindspore/profiler/parser/ascend_analysis/file_manager.py +25 -4
- mindspore/profiler/parser/ascend_analysis/function_event.py +43 -19
- mindspore/profiler/parser/ascend_analysis/fwk_cann_parser.py +31 -26
- mindspore/profiler/parser/ascend_analysis/fwk_file_parser.py +56 -10
- mindspore/profiler/parser/ascend_analysis/msprof_timeline_parser.py +55 -8
- mindspore/profiler/parser/ascend_analysis/path_manager.py +313 -0
- mindspore/profiler/parser/ascend_analysis/profiler_info_parser.py +27 -20
- mindspore/profiler/parser/ascend_analysis/trace_event_manager.py +9 -2
- mindspore/profiler/parser/ascend_msprof_exporter.py +5 -4
- mindspore/profiler/parser/ascend_timeline_generator.py +27 -25
- mindspore/profiler/parser/base_timeline_generator.py +19 -25
- mindspore/profiler/parser/cpu_gpu_timeline_generator.py +25 -12
- mindspore/profiler/parser/framework_parser.py +1 -391
- mindspore/profiler/parser/gpu_analysis/__init__.py +14 -0
- mindspore/profiler/parser/gpu_analysis/function_event.py +44 -0
- mindspore/profiler/parser/gpu_analysis/fwk_file_parser.py +89 -0
- mindspore/profiler/parser/gpu_analysis/profiler_info_parser.py +72 -0
- mindspore/profiler/parser/memory_usage_parser.py +0 -154
- mindspore/profiler/parser/profiler_info.py +78 -6
- mindspore/profiler/profiler.py +153 -0
- mindspore/profiler/profiling.py +280 -412
- mindspore/rewrite/__init__.py +1 -2
- mindspore/rewrite/common/namespace.py +4 -4
- mindspore/rewrite/symbol_tree/symbol_tree.py +3 -3
- mindspore/run_check/_check_version.py +36 -103
- mindspore/safeguard/rewrite_obfuscation.py +591 -247
- mindspore/swresample-4.dll +0 -0
- mindspore/swscale-6.dll +0 -0
- mindspore/tbbmalloc.dll +0 -0
- mindspore/tinyxml2.dll +0 -0
- mindspore/train/__init__.py +4 -3
- mindspore/train/_utils.py +28 -2
- mindspore/train/amp.py +171 -53
- mindspore/train/callback/__init__.py +2 -2
- mindspore/train/callback/_callback.py +4 -4
- mindspore/train/callback/_checkpoint.py +85 -22
- mindspore/train/callback/_cluster_monitor.py +1 -1
- mindspore/train/callback/_flops_collector.py +1 -0
- mindspore/train/callback/_loss_monitor.py +3 -3
- mindspore/train/callback/_on_request_exit.py +134 -31
- mindspore/train/callback/_summary_collector.py +5 -5
- mindspore/train/callback/_tft_register.py +352 -0
- mindspore/train/dataset_helper.py +7 -3
- mindspore/train/metrics/metric.py +3 -3
- mindspore/train/metrics/roc.py +4 -4
- mindspore/train/mind_ir_pb2.py +44 -39
- mindspore/train/model.py +134 -58
- mindspore/train/serialization.py +336 -112
- mindspore/turbojpeg.dll +0 -0
- mindspore/utils/__init__.py +21 -0
- mindspore/utils/utils.py +60 -0
- mindspore/vcmeta.dll +0 -0
- mindspore/vcruntime140.dll +0 -0
- mindspore/vcruntime140_1.dll +0 -0
- mindspore/version.py +1 -1
- {mindspore-2.3.0.dist-info → mindspore-2.4.0.dist-info}/METADATA +6 -2
- {mindspore-2.3.0.dist-info → mindspore-2.4.0.dist-info}/RECORD +281 -275
- mindspore/include/c_api/ms/abstract.h +0 -67
- mindspore/include/c_api/ms/attribute.h +0 -197
- mindspore/include/c_api/ms/base/handle_types.h +0 -43
- mindspore/include/c_api/ms/base/macros.h +0 -32
- mindspore/include/c_api/ms/base/status.h +0 -33
- mindspore/include/c_api/ms/base/types.h +0 -283
- mindspore/include/c_api/ms/context.h +0 -102
- mindspore/include/c_api/ms/graph.h +0 -160
- mindspore/include/c_api/ms/node.h +0 -606
- mindspore/include/c_api/ms/tensor.h +0 -161
- mindspore/include/c_api/ms/value.h +0 -84
- mindspore/mindspore_shared_lib.dll +0 -0
- mindspore/nn/extend/basic.py +0 -140
- mindspore/nn/extend/embedding.py +0 -143
- mindspore/nn/extend/layer/normalization.py +0 -109
- mindspore/nn/extend/pooling.py +0 -117
- mindspore/nn/layer/embedding_service.py +0 -531
- mindspore/ops/_op_impl/aicpu/strided_slice_v2.py +0 -93
- mindspore/ops/_op_impl/aicpu/strided_slice_v2_grad.py +0 -66
- mindspore/ops/extend/__init__.py +0 -53
- mindspore/ops/extend/array_func.py +0 -218
- mindspore/ops/extend/math_func.py +0 -76
- mindspore/ops/extend/nn_func.py +0 -308
- mindspore/ops/silent_check.py +0 -162
- mindspore/profiler/parser/msadvisor_analyzer.py +0 -82
- mindspore/profiler/parser/msadvisor_parser.py +0 -240
- mindspore/train/callback/_mindio_ttp.py +0 -443
- {mindspore-2.3.0.dist-info → mindspore-2.4.0.dist-info}/WHEEL +0 -0
- {mindspore-2.3.0.dist-info → mindspore-2.4.0.dist-info}/entry_points.txt +0 -0
- {mindspore-2.3.0.dist-info → mindspore-2.4.0.dist-info}/top_level.txt +0 -0
mindspore/mint/nn/functional.py
CHANGED
@@ -14,7 +14,8 @@
 # ============================================================================
 """mint nn functional."""
 from __future__ import absolute_import
-
+import mindspore.ops as ops
+from mindspore.ops.function.nn_func import max_pool2d_ext as max_pool2d
 from mindspore.ops.functional import (
     conv_transpose2d,
     grid_sample
@@ -26,7 +27,7 @@ from mindspore.ops.functional import (
 # 3

 # 4
-
+from mindspore.ops.function.nn_func import interpolate_ext as interpolate
 # 5
 from mindspore.ops.function.nn_func import pad_ext as pad
 # 6
@@ -36,7 +37,7 @@ from mindspore.ops.auto_generate import fold_ext as fold
 # 8
 from mindspore.ops.functional import layer_norm
 # 9
-
+
 # 10

 # 11
@@ -50,11 +51,9 @@ from mindspore.ops.function.nn_func import dropout_ext as dropout
 # 15

 # 16
-
-# 17
-from mindspore.ops.function.nn_func import binary_cross_entropy
+from mindspore.ops.function.nn_func import log_softmax_ext as log_softmax
 # 18
-
+from mindspore.ops.auto_generate import prelu
 # 19

 # 20
@@ -147,7 +146,6 @@ from mindspore.ops.functional import embedding
 # 63

 # 64
-from mindspore.ops.extend import one_hot as one_hot_ext

 # 65

@@ -204,7 +202,7 @@ from mindspore.ops.function.nn_func import avg_pool2d_ext as avg_pool2d
 # 91

 # 92
-from mindspore.ops.
+from mindspore.ops.auto_generate import leaky_relu_ext as leaky_relu
 # 93
 from mindspore.ops.auto_generate import softplus_ext as softplus # pylint: disable=W0611
 # 94
@@ -218,14 +216,107 @@ from mindspore.ops.function.math_func import tanh
 # 98

 # 99
-
+from mindspore.ops.auto_generate import selu_ext as selu # pylint: disable=W0611
 # 100
-from mindspore.ops.
+from mindspore.ops.auto_generate import softshrink # pylint: disable=W0611
+# 220
+from mindspore.ops.function.nn_func import hardshrink # pylint: disable=W0611
+# 221
+from mindspore.ops.function.nn_func import hardsigmoid # pylint: disable=W0611
+# 222
+from mindspore.ops.function.nn_func import hardswish # pylint: disable=W0611
+# 267
+from mindspore.ops.auto_generate import mish_ext as mish # pylint: disable=W0611
+# 238
+from mindspore.ops.auto_generate import l1_loss_ext as l1_loss # pylint: disable=W0611
+
+# 257
+
+# 258
+from mindspore.ops.function.nn_func import mse_loss_ext as mse_loss
+# 259
+
 # 323

 # 324
 from mindspore.ops.auto_generate import elu_ext as elu
-
+
+# 556
+from mindspore.ops.function.nn_func import logsigmoid_ext as logsigmoid
+
+from mindspore.ops.auto_generate import adaptive_avg_pool1d
+
+from mindspore.ops.functional import adaptive_avg_pool2d_ext as adaptive_avg_pool2d
+
+
+def binary_cross_entropy(input, target, weight=None, reduction='mean'):
+    r"""
+    Computes the binary cross entropy(Measure the difference information between two probability distributions) between
+    predictive value `input` and target value `target`.
+
+    Set `input` as :math:`x`, `target` as :math:`y`, output as :math:`\ell(x, y)`, the
+    weight of nth batch of binary cross entropy is :math:`w_n`.
+    Let,
+
+    .. math::
+        L = \{l_1,\dots,l_N\}^\top, \quad
+        l_n = - w_n \left[ y_n \cdot \log x_n + (1 - y_n) \cdot \log (1 - x_n) \right]
+
+    In which, :math:`L` indicates the loss of all `batch_size`, :math:`l` indicates the loss of one `batch_size`,
+    and :math:`n` indicates one `batch_size` in the :math:`1-N` range. Then,
+
+    .. math::
+        \ell(x, y) = \begin{cases}
+        L, & \text{if reduction} = \text{'none';}\\
+        \operatorname{mean}(L), & \text{if reduction} = \text{'mean';}\\
+        \operatorname{sum}(L), & \text{if reduction} = \text{'sum'.}
+        \end{cases}
+
+    .. warning::
+        - The value of `input` must range from `0` to `l`.
+
+    Args:
+        input (Tensor): The predictive value whose data type must be float16 or float32.
+        target (Tensor): The target value which has the same shape and data type as `input`.
+            And the data type is float16 or float32.
+        weight (Tensor, optional): A rescaling weight applied to the loss of each batch element.
+            Its shape must be able to broadcast to that of `input` and `target`.
+            And it must have the same shape and data type as `input`. Default: ``None`` . If set to ``None`` ,
+            the loss function
+            will not consider any sample weights, and each sample will be treated as having equal importance
+            when calculating the loss.
+        reduction (str, optional): Apply specific reduction method to the output: ``'none'`` , ``'mean'`` ,
+            ``'sum'`` . Default: ``'mean'`` .
+
+            - ``'none'``: no reduction will be applied.
+            - ``'mean'``: compute and return the weighted mean of elements in the output.
+            - ``'sum'``: the output elements will be summed.
+
+    Returns:
+        Tensor or Scalar. Returns Tensor that has the same dtype and shape as `input` if `reduction` is ``'none'``.
+        Otherwise, returns a scalar Tensor.
+
+    Raises:
+        TypeError: If `input`, `target` or `weight` is not a Tensor.
+        TypeError: If dtype of `input`, `target` or `weight` (if given) is neither float16 nor float32.
+        ValueError: If `reduction` is not one of ``'none'``, ``'mean'`` or ``'sum'``.
+        ValueError: If shape of `target` is not the same as `input` or `weight` (if given).
+
+    Supported Platforms:
+        ``Ascend`` ``GPU`` ``CPU``
+
+    Examples:
+        >>> import mindspore
+        >>> import numpy as np
+        >>> from mindspore import Tensor, ops
+        >>> input = Tensor(np.array([0.2, 0.7, 0.1]), mindspore.float32)
+        >>> target = Tensor(np.array([0., 1., 0.]), mindspore.float32)
+        >>> weight = Tensor(np.array([1, 2, 2]), mindspore.float32)
+        >>> output = mint.nn.functional.binary_cross_entropy(input, target, weight)
+        >>> print(output)
+        0.38240486
+    """
+    return ops.function.binary_cross_entropy(input, target, weight, reduction)


 def binary_cross_entropy_with_logits(input, target, weight=None, reduction='mean', pos_weight=None):
@@ -317,11 +408,11 @@ def binary_cross_entropy_with_logits(input, target, weight=None, reduction='mean
         >>> target = Tensor(np.array([[0.3, 0.8, 1.2], [-0.6, 0.1, 2.2]]), mindspore.float32)
         >>> weight = Tensor(np.array([1.0, 1.0, 1.0]), mindspore.float32)
         >>> pos_weight = Tensor(np.array([1.0, 1.0, 1.0]), mindspore.float32)
-        >>> output = mint.nn.functional.binary_cross_entropy_with_logits(input, target, weight, pos_weight)
+        >>> output = mint.nn.functional.binary_cross_entropy_with_logits(input, target, weight, 'mean', pos_weight)
         >>> print(output)
         0.3463612
     """
-    return
+    return ops.function.binary_cross_entropy_with_logits(input, target, weight, pos_weight, reduction)


 def one_hot(tensor, num_classes=-1):
@@ -359,7 +450,7 @@ def one_hot(tensor, num_classes=-1):
          [0 1 0]
          [0 0 1]]
     """
-    return one_hot_ext(tensor, num_classes)
+    return ops.function.array_func.one_hot_ext(tensor, num_classes)


 __all__ = [
@@ -372,7 +463,7 @@ __all__ = [
     # 3

     # 4
-
+    "interpolate",
     # 5
     'pad',
     # 6
@@ -382,7 +473,7 @@ __all__ = [
     # 8
     'layer_norm',
     # 9
-
+
     # 10

     # 11
@@ -396,11 +487,11 @@ __all__ = [
     # 15

     # 16
-
+    'log_softmax',
     # 17

     # 18
-
+    'prelu',
     # 19
     'binary_cross_entropy',
     # 20
@@ -570,4 +661,19 @@ __all__ = [
     # 324
     'elu',
     # 325
+
+    #556
+    'logsigmoid',
+
+    # 257
+
+    # 258
+    'mse_loss',
+    # 259
+
+    'adaptive_avg_pool1d',
+
+    'adaptive_avg_pool2d',
+
+
 ]
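The new `binary_cross_entropy` wrapper above simply forwards to `ops.function.binary_cross_entropy`, and the weighted loss it documents can be checked without MindSpore. The following is a minimal NumPy sketch (not part of the package, written here only to illustrate the formula) using the values from the docstring example; the mean comes out to roughly 0.3824, matching the documented 0.38240486.

import numpy as np

# Values taken from the binary_cross_entropy docstring example above.
x = np.array([0.2, 0.7, 0.1], dtype=np.float32)   # predictions, expected to lie in (0, 1)
y = np.array([0.0, 1.0, 0.0], dtype=np.float32)   # targets
w = np.array([1.0, 2.0, 2.0], dtype=np.float32)   # per-element rescaling weights

# l_n = -w_n * [y_n * log(x_n) + (1 - y_n) * log(1 - x_n)]
l = -w * (y * np.log(x) + (1.0 - y) * np.log(1.0 - x))

print(l.mean())  # reduction='mean' -> ~0.3824, matching the documented 0.38240486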
mindspore/mint/nn/layer/__init__.py
ADDED
@@ -0,0 +1,39 @@
+# Copyright 2024 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+"""
+Layer.
+
+The high-level components(Cells) used to construct the neural network.
+"""
+from __future__ import absolute_import
+
+from mindspore.mint.nn.layer import normalization
+from mindspore.mint.nn.layer import activation
+from mindspore.mint.nn.layer import pooling
+from mindspore.mint.nn.layer.normalization import GroupNorm
+from mindspore.mint.nn.layer.normalization import BatchNorm1d
+from mindspore.mint.nn.layer.normalization import BatchNorm2d
+from mindspore.mint.nn.layer.normalization import BatchNorm3d
+from mindspore.mint.nn.layer.normalization import LayerNorm
+from mindspore.mint.nn.layer.activation import LogSigmoid
+from mindspore.mint.nn.layer.activation import SiLU
+from mindspore.mint.nn.layer.pooling import AdaptiveAvgPool1d
+from mindspore.mint.nn.layer.pooling import AdaptiveAvgPool2d
+
+
+__all__ = []
+__all__.extend(normalization.__all__)
+__all__.extend(activation.__all__)
+__all__.extend(pooling.__all__)
mindspore/mint/nn/layer/activation.py
ADDED
@@ -0,0 +1,133 @@
+# Copyright 2020-2024 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+"""activation layer for mint"""
+from __future__ import absolute_import
+from __future__ import division
+
+from mindspore import mint
+from mindspore.nn.cell import Cell
+
+
+class SiLU(Cell):
+    r"""
+    Calculates the SiLU activation function element-wise. It is also sometimes referred to as Swish
+    function.
+
+    The SiLU function is defined as follows:
+
+    .. math::
+
+        \text{SiLU}(x) = x * \sigma(x),
+
+    where :math:`x_i` is an element of the input, :math:`\sigma(x)` is Sigmoid function.
+
+    .. math::
+
+        \text{sigmoid}(x_i) = \frac{1}{1 + \exp(-x_i)},
+
+    SiLU Activation Function Graph:
+
+    .. image:: ../images/SiLU.png
+        :align: center
+
+    .. warning::
+        This is an experimental API that is subject to change or deletion.
+
+    Inputs:
+        - **input** (Tensor) - `input` is :math:`x` in the preceding formula.
+          Input with the data type float16 or float32. Tensor of any dimension.
+
+    Outputs:
+        Tensor, with the same type and shape as the `input`.
+
+    Raises:
+        TypeError: If dtype of `input` is neither float16 nor float32.
+
+    Supported Platforms:
+        ``Ascend``
+
+    Examples:
+        >>> import mindspore
+        >>> from mindspore import Tensor, mint
+        >>> import numpy as np
+        >>> input = Tensor(np.array([-1, 2, -3, 2, -1]), mindspore.float16)
+        >>> silu = mint.nn.SiLU()
+        >>> output = silu(input)
+        >>> print(output)
+        [-0.269 1.762 -0.1423 1.762 -0.269]
+    """
+
+    def __init__(self):
+        """Initialize SiLU."""
+        super(SiLU, self).__init__()
+
+    def construct(self, x):
+        return mint.nn.functional.silu(x)
+
+
+class LogSigmoid(Cell):
+    r"""
+    Applies logsigmoid activation element-wise. The input is a Tensor with any valid shape.
+
+    Logsigmoid is defined as:
+
+    .. math::
+        \text{logsigmoid}(x_{i}) = \log(\frac{1}{1 + \exp(-x_i)}),
+
+    where :math:`x_{i}` is the element of the input.
+
+    LogSigmoid Activation Function Graph:
+
+    .. image:: ../images/LogSigmoid.png
+        :align: center
+
+    .. warning::
+        This is an experimental API that is subject to change or deletion.
+
+    Inputs:
+        - **input** (Tensor) - The input of LogSigmoid with data type of bfloat16, float16 or float32.
+          The shape is :math:`(*)` where :math:`*` means, any number of additional dimensions.
+
+    Outputs:
+        Tensor, with the same type and shape as the `input`.
+
+    Raises:
+        TypeError: If dtype of `input` is not bfloat16, float16 and float32.
+        TypeError: If `input` is not a Tensor.
+
+    Supported Platforms:
+        ``Ascend``
+
+    Examples:
+        >>> import mindspore
+        >>> from mindspore import Tensor
+        >>> net = mint.nn.LogSigmoid()
+        >>> input = Tensor([1.0, 2.0, 3.0], mindspore.float32)
+        >>> output = net(input)
+        >>> print(output)
+        [-0.31326166 -0.12692806 -0.04858734]
+    """
+    def __init__(self):
+        """Initialize LogSigmoid."""
+        super(LogSigmoid, self).__init__()
+
+    def construct(self, input):
+        return mint.nn.functional.logsigmoid(input)
+
+
+__all__ = [
+    'LogSigmoid',
+    'SiLU',
+]
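Both new Cells delegate to the corresponding `mint.nn.functional` ops, so their outputs follow the closed-form definitions quoted in their docstrings. As a quick sanity check, a standalone NumPy sketch (not code from the package) reproduces the documented example outputs directly from those formulas:

import numpy as np

def sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))

# SiLU(x) = x * sigmoid(x); inputs from the SiLU docstring example (float16 there).
x = np.array([-1.0, 2.0, -3.0, 2.0, -1.0])
print(x * sigmoid(x))      # ~[-0.269  1.762 -0.142  1.762 -0.269]

# logsigmoid(x) = log(1 / (1 + exp(-x))); inputs from the LogSigmoid docstring example.
y = np.array([1.0, 2.0, 3.0])
print(np.log(sigmoid(y)))  # ~[-0.31326 -0.12693 -0.04859]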