onnx 1.16.1-cp312-cp312-win32.whl → 1.17.0-cp312-cp312-win32.whl
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
Potentially problematic release: this version of onnx may be problematic.
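Before digging into the file-level changes, a quick way to confirm which build is actually active after installing the new wheel is to query the package itself. The snippet below is a minimal sanity check, assuming the 1.17.0 wheel is installed in the current environment; it only uses public onnx helpers (`onnx.__version__`, `onnx.defs.onnx_opset_version()`).

```python
# Minimal post-upgrade sanity check (assumes the onnx 1.17.0 wheel is installed).
import onnx
from onnx import defs

print(onnx.__version__)           # expect "1.17.0" after the upgrade
print(defs.onnx_opset_version())  # highest default-domain opset registered by this build
```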
- onnx/__init__.py +3 -1
- onnx/_custom_element_types.py +63 -0
- onnx/backend/base.py +17 -15
- onnx/backend/sample/ops/__init__.py +4 -4
- onnx/backend/sample/ops/abs.py +1 -0
- onnx/backend/test/__init__.py +1 -0
- onnx/backend/test/case/__init__.py +2 -2
- onnx/backend/test/case/base.py +6 -5
- onnx/backend/test/case/model/__init__.py +4 -3
- onnx/backend/test/case/model/expand.py +1 -0
- onnx/backend/test/case/model/gradient.py +1 -0
- onnx/backend/test/case/model/sequence.py +3 -1
- onnx/backend/test/case/model/shrink.py +1 -0
- onnx/backend/test/case/model/sign.py +1 -0
- onnx/backend/test/case/model/single-relu.py +1 -0
- onnx/backend/test/case/model/stringnormalizer.py +1 -1
- onnx/backend/test/case/node/__init__.py +31 -22
- onnx/backend/test/case/node/_image_decoder_data.py +1 -0
- onnx/backend/test/case/node/abs.py +1 -0
- onnx/backend/test/case/node/acos.py +1 -0
- onnx/backend/test/case/node/acosh.py +1 -0
- onnx/backend/test/case/node/adagrad.py +2 -1
- onnx/backend/test/case/node/adam.py +4 -1
- onnx/backend/test/case/node/add.py +1 -0
- onnx/backend/test/case/node/affinegrid.py +1 -0
- onnx/backend/test/case/node/ai_onnx_ml/array_feature_extractor.py +1 -0
- onnx/backend/test/case/node/ai_onnx_ml/binarizer.py +1 -0
- onnx/backend/test/case/node/ai_onnx_ml/label_encoder.py +1 -0
- onnx/backend/test/case/node/ai_onnx_ml/tree_ensemble.py +1 -0
- onnx/backend/test/case/node/and.py +1 -0
- onnx/backend/test/case/node/argmax.py +1 -0
- onnx/backend/test/case/node/argmin.py +1 -0
- onnx/backend/test/case/node/asin.py +1 -0
- onnx/backend/test/case/node/asinh.py +1 -0
- onnx/backend/test/case/node/atan.py +1 -0
- onnx/backend/test/case/node/atanh.py +1 -0
- onnx/backend/test/case/node/averagepool.py +1 -0
- onnx/backend/test/case/node/batchnorm.py +1 -0
- onnx/backend/test/case/node/bernoulli.py +1 -0
- onnx/backend/test/case/node/bitshift.py +1 -0
- onnx/backend/test/case/node/bitwiseand.py +1 -0
- onnx/backend/test/case/node/bitwisenot.py +1 -0
- onnx/backend/test/case/node/bitwiseor.py +1 -0
- onnx/backend/test/case/node/bitwisexor.py +1 -0
- onnx/backend/test/case/node/blackmanwindow.py +13 -3
- onnx/backend/test/case/node/cast.py +2 -1
- onnx/backend/test/case/node/castlike.py +1 -0
- onnx/backend/test/case/node/ceil.py +1 -0
- onnx/backend/test/case/node/celu.py +1 -0
- onnx/backend/test/case/node/center_crop_pad.py +1 -0
- onnx/backend/test/case/node/clip.py +1 -0
- onnx/backend/test/case/node/col2im.py +1 -1
- onnx/backend/test/case/node/compress.py +1 -0
- onnx/backend/test/case/node/concat.py +3 -2
- onnx/backend/test/case/node/constant.py +1 -0
- onnx/backend/test/case/node/constantofshape.py +1 -0
- onnx/backend/test/case/node/conv.py +1 -0
- onnx/backend/test/case/node/convinteger.py +1 -0
- onnx/backend/test/case/node/convtranspose.py +135 -0
- onnx/backend/test/case/node/cos.py +1 -0
- onnx/backend/test/case/node/cosh.py +1 -0
- onnx/backend/test/case/node/cumsum.py +1 -0
- onnx/backend/test/case/node/deformconv.py +17 -26
- onnx/backend/test/case/node/depthtospace.py +1 -0
- onnx/backend/test/case/node/dequantizelinear.py +1 -0
- onnx/backend/test/case/node/det.py +1 -0
- onnx/backend/test/case/node/dft.py +1 -0
- onnx/backend/test/case/node/div.py +1 -0
- onnx/backend/test/case/node/dropout.py +1 -0
- onnx/backend/test/case/node/dynamicquantizelinear.py +1 -0
- onnx/backend/test/case/node/einsum.py +2 -3
- onnx/backend/test/case/node/elu.py +1 -0
- onnx/backend/test/case/node/equal.py +1 -0
- onnx/backend/test/case/node/erf.py +1 -0
- onnx/backend/test/case/node/exp.py +1 -0
- onnx/backend/test/case/node/expand.py +1 -0
- onnx/backend/test/case/node/eyelike.py +1 -0
- onnx/backend/test/case/node/flatten.py +1 -0
- onnx/backend/test/case/node/floor.py +1 -0
- onnx/backend/test/case/node/gather.py +1 -0
- onnx/backend/test/case/node/gatherelements.py +1 -0
- onnx/backend/test/case/node/gathernd.py +1 -0
- onnx/backend/test/case/node/gelu.py +1 -0
- onnx/backend/test/case/node/gemm.py +3 -4
- onnx/backend/test/case/node/globalaveragepool.py +1 -0
- onnx/backend/test/case/node/globalmaxpool.py +1 -0
- onnx/backend/test/case/node/greater.py +1 -0
- onnx/backend/test/case/node/greater_equal.py +1 -0
- onnx/backend/test/case/node/gridsample.py +1 -0
- onnx/backend/test/case/node/groupnormalization.py +1 -0
- onnx/backend/test/case/node/gru.py +3 -2
- onnx/backend/test/case/node/hammingwindow.py +13 -2
- onnx/backend/test/case/node/hannwindow.py +10 -2
- onnx/backend/test/case/node/hardmax.py +1 -0
- onnx/backend/test/case/node/hardsigmoid.py +1 -0
- onnx/backend/test/case/node/hardswish.py +1 -0
- onnx/backend/test/case/node/identity.py +1 -0
- onnx/backend/test/case/node/if.py +1 -0
- onnx/backend/test/case/node/instancenorm.py +1 -0
- onnx/backend/test/case/node/isinf.py +1 -0
- onnx/backend/test/case/node/isnan.py +1 -0
- onnx/backend/test/case/node/layernormalization.py +1 -0
- onnx/backend/test/case/node/leakyrelu.py +1 -0
- onnx/backend/test/case/node/less.py +1 -0
- onnx/backend/test/case/node/less_equal.py +1 -0
- onnx/backend/test/case/node/log.py +1 -0
- onnx/backend/test/case/node/logsoftmax.py +1 -0
- onnx/backend/test/case/node/loop.py +4 -3
- onnx/backend/test/case/node/lppool.py +1 -0
- onnx/backend/test/case/node/lrn.py +1 -0
- onnx/backend/test/case/node/lstm.py +3 -2
- onnx/backend/test/case/node/matmul.py +1 -0
- onnx/backend/test/case/node/matmulinteger.py +1 -0
- onnx/backend/test/case/node/max.py +1 -0
- onnx/backend/test/case/node/maxpool.py +1 -0
- onnx/backend/test/case/node/maxunpool.py +1 -0
- onnx/backend/test/case/node/mean.py +1 -0
- onnx/backend/test/case/node/meanvariancenormalization.py +1 -0
- onnx/backend/test/case/node/melweightmatrix.py +1 -0
- onnx/backend/test/case/node/min.py +1 -0
- onnx/backend/test/case/node/mish.py +1 -0
- onnx/backend/test/case/node/mod.py +1 -0
- onnx/backend/test/case/node/momentum.py +1 -0
- onnx/backend/test/case/node/mul.py +1 -0
- onnx/backend/test/case/node/neg.py +1 -0
- onnx/backend/test/case/node/negativeloglikelihoodloss.py +4 -1
- onnx/backend/test/case/node/nonmaxsuppression.py +1 -0
- onnx/backend/test/case/node/nonzero.py +1 -0
- onnx/backend/test/case/node/not.py +1 -0
- onnx/backend/test/case/node/onehot.py +1 -0
- onnx/backend/test/case/node/optionalgetelement.py +3 -2
- onnx/backend/test/case/node/optionalhaselement.py +2 -3
- onnx/backend/test/case/node/or.py +1 -0
- onnx/backend/test/case/node/pad.py +2 -1
- onnx/backend/test/case/node/pow.py +1 -0
- onnx/backend/test/case/node/prelu.py +1 -0
- onnx/backend/test/case/node/qlinearconv.py +1 -0
- onnx/backend/test/case/node/qlinearmatmul.py +1 -0
- onnx/backend/test/case/node/quantizelinear.py +1 -0
- onnx/backend/test/case/node/rangeop.py +1 -0
- onnx/backend/test/case/node/reciprocal.py +1 -0
- onnx/backend/test/case/node/reduce_log_sum.py +1 -0
- onnx/backend/test/case/node/reduce_log_sum_exp.py +1 -0
- onnx/backend/test/case/node/reducel1.py +1 -0
- onnx/backend/test/case/node/reducel2.py +1 -0
- onnx/backend/test/case/node/reducemax.py +2 -1
- onnx/backend/test/case/node/reducemean.py +1 -0
- onnx/backend/test/case/node/reducemin.py +1 -0
- onnx/backend/test/case/node/reduceprod.py +1 -0
- onnx/backend/test/case/node/reducesum.py +2 -1
- onnx/backend/test/case/node/reducesumsquare.py +1 -0
- onnx/backend/test/case/node/regex_full_match.py +1 -0
- onnx/backend/test/case/node/relu.py +1 -0
- onnx/backend/test/case/node/reshape.py +1 -0
- onnx/backend/test/case/node/resize.py +3 -2
- onnx/backend/test/case/node/reversesequence.py +1 -0
- onnx/backend/test/case/node/rnn.py +3 -2
- onnx/backend/test/case/node/roialign.py +1 -0
- onnx/backend/test/case/node/round.py +4 -3
- onnx/backend/test/case/node/scan.py +1 -0
- onnx/backend/test/case/node/scatter.py +1 -0
- onnx/backend/test/case/node/scatterelements.py +7 -3
- onnx/backend/test/case/node/scatternd.py +1 -0
- onnx/backend/test/case/node/selu.py +1 -0
- onnx/backend/test/case/node/sequence_map.py +1 -0
- onnx/backend/test/case/node/sequenceinsert.py +4 -3
- onnx/backend/test/case/node/shape.py +1 -0
- onnx/backend/test/case/node/shrink.py +1 -0
- onnx/backend/test/case/node/sigmoid.py +1 -0
- onnx/backend/test/case/node/sign.py +1 -0
- onnx/backend/test/case/node/sin.py +1 -0
- onnx/backend/test/case/node/sinh.py +1 -0
- onnx/backend/test/case/node/size.py +1 -0
- onnx/backend/test/case/node/slice.py +1 -0
- onnx/backend/test/case/node/softmax.py +1 -0
- onnx/backend/test/case/node/softmaxcrossentropy.py +4 -1
- onnx/backend/test/case/node/softplus.py +1 -0
- onnx/backend/test/case/node/softsign.py +1 -0
- onnx/backend/test/case/node/spacetodepth.py +1 -0
- onnx/backend/test/case/node/split.py +1 -0
- onnx/backend/test/case/node/splittosequence.py +1 -0
- onnx/backend/test/case/node/sqrt.py +1 -0
- onnx/backend/test/case/node/squeeze.py +1 -0
- onnx/backend/test/case/node/stft.py +4 -1
- onnx/backend/test/case/node/string_concat.py +1 -0
- onnx/backend/test/case/node/string_split.py +1 -0
- onnx/backend/test/case/node/stringnormalizer.py +1 -0
- onnx/backend/test/case/node/sub.py +1 -0
- onnx/backend/test/case/node/sum.py +1 -0
- onnx/backend/test/case/node/tan.py +1 -0
- onnx/backend/test/case/node/tanh.py +1 -0
- onnx/backend/test/case/node/tfidfvectorizer.py +1 -0
- onnx/backend/test/case/node/thresholdedrelu.py +1 -0
- onnx/backend/test/case/node/tile.py +1 -0
- onnx/backend/test/case/node/topk.py +1 -0
- onnx/backend/test/case/node/transpose.py +1 -0
- onnx/backend/test/case/node/trilu.py +1 -0
- onnx/backend/test/case/node/unique.py +7 -0
- onnx/backend/test/case/node/unsqueeze.py +1 -0
- onnx/backend/test/case/node/upsample.py +1 -0
- onnx/backend/test/case/node/where.py +1 -0
- onnx/backend/test/case/node/xor.py +1 -0
- onnx/backend/test/case/test_case.py +6 -5
- onnx/backend/test/case/utils.py +2 -2
- onnx/backend/test/cmd_tools.py +1 -0
- onnx/backend/test/data/node/test_acos/model.onnx +0 -0
- onnx/backend/test/data/node/test_acos/test_data_set_0/output_0.pb +0 -0
- onnx/backend/test/data/node/test_acos_example/model.onnx +0 -0
- onnx/backend/test/data/node/test_acosh/model.onnx +0 -0
- onnx/backend/test/data/node/test_acosh/test_data_set_0/output_0.pb +1 -1
- onnx/backend/test/data/node/test_acosh_example/model.onnx +0 -0
- onnx/backend/test/data/node/test_asin/model.onnx +0 -0
- onnx/backend/test/data/node/test_asin/test_data_set_0/output_0.pb +1 -1
- onnx/backend/test/data/node/test_asin_example/model.onnx +0 -0
- onnx/backend/test/data/node/test_asinh/model.onnx +0 -0
- onnx/backend/test/data/node/test_asinh/test_data_set_0/output_0.pb +1 -1
- onnx/backend/test/data/node/test_asinh_example/model.onnx +0 -0
- onnx/backend/test/data/node/test_atan/model.onnx +0 -0
- onnx/backend/test/data/node/test_atan/test_data_set_0/output_0.pb +1 -1
- onnx/backend/test/data/node/test_atan_example/model.onnx +0 -0
- onnx/backend/test/data/node/test_atanh/model.onnx +0 -0
- onnx/backend/test/data/node/test_atanh/test_data_set_0/output_0.pb +2 -2
- onnx/backend/test/data/node/test_atanh_example/model.onnx +0 -0
- onnx/backend/test/data/node/test_averagepool_1d_default/model.onnx +0 -0
- onnx/backend/test/data/node/test_averagepool_2d_ceil/model.onnx +0 -0
- onnx/backend/test/data/node/test_averagepool_2d_default/model.onnx +0 -0
- onnx/backend/test/data/node/test_averagepool_2d_dilations/model.onnx +0 -0
- onnx/backend/test/data/node/test_averagepool_2d_pads/model.onnx +0 -0
- onnx/backend/test/data/node/test_averagepool_2d_pads_count_include_pad/model.onnx +0 -0
- onnx/backend/test/data/node/test_averagepool_2d_precomputed_pads/model.onnx +0 -0
- onnx/backend/test/data/node/test_averagepool_2d_precomputed_pads_count_include_pad/model.onnx +0 -0
- onnx/backend/test/data/node/test_averagepool_2d_precomputed_same_upper/model.onnx +0 -0
- onnx/backend/test/data/node/test_averagepool_2d_precomputed_strides/model.onnx +0 -0
- onnx/backend/test/data/node/test_averagepool_2d_same_lower/model.onnx +0 -0
- onnx/backend/test/data/node/test_averagepool_2d_same_upper/model.onnx +0 -0
- onnx/backend/test/data/node/test_averagepool_2d_strides/model.onnx +0 -0
- onnx/backend/test/data/node/test_averagepool_3d_default/model.onnx +0 -0
- onnx/backend/test/data/node/test_averagepool_3d_dilations_large_count_include_pad_is_0_ceil_mode_is_False/model.onnx +0 -0
- onnx/backend/test/data/node/test_averagepool_3d_dilations_large_count_include_pad_is_0_ceil_mode_is_True/model.onnx +0 -0
- onnx/backend/test/data/node/test_averagepool_3d_dilations_large_count_include_pad_is_1_ceil_mode_is_False/model.onnx +0 -0
- onnx/backend/test/data/node/test_averagepool_3d_dilations_large_count_include_pad_is_1_ceil_mode_is_True/model.onnx +0 -0
- onnx/backend/test/data/node/test_averagepool_3d_dilations_small/model.onnx +0 -0
- onnx/backend/test/data/node/test_basic_conv_with_padding/model.onnx +0 -0
- onnx/backend/test/data/node/test_basic_conv_without_padding/model.onnx +0 -0
- onnx/backend/test/data/node/test_basic_deform_conv_with_padding/model.onnx +0 -0
- onnx/backend/test/data/node/test_basic_deform_conv_without_padding/model.onnx +0 -0
- onnx/backend/test/data/node/test_bernoulli/model.onnx +0 -0
- onnx/backend/test/data/node/test_bernoulli_double/model.onnx +0 -0
- onnx/backend/test/data/node/test_bernoulli_double_expanded/model.onnx +0 -0
- onnx/backend/test/data/node/test_bernoulli_expanded/model.onnx +0 -0
- onnx/backend/test/data/node/test_bernoulli_seed/model.onnx +0 -0
- onnx/backend/test/data/node/test_bernoulli_seed_expanded/model.onnx +0 -0
- onnx/backend/test/data/node/test_blackmanwindow/test_data_set_0/output_0.pb +0 -0
- onnx/backend/test/data/node/test_blackmanwindow_expanded/test_data_set_0/output_0.pb +0 -0
- onnx/backend/test/data/node/test_blackmanwindow_symmetric/test_data_set_0/output_0.pb +0 -0
- onnx/backend/test/data/node/test_blackmanwindow_symmetric_expanded/test_data_set_0/output_0.pb +0 -0
- onnx/backend/test/data/node/test_cast_FLOAT16_to_INT4/test_data_set_0/output_0.pb +1 -1
- onnx/backend/test/data/node/test_cast_FLOAT_to_INT4/test_data_set_0/output_0.pb +1 -1
- onnx/backend/test/data/node/test_cast_INT4_to_FLOAT/test_data_set_0/input_0.pb +1 -1
- onnx/backend/test/data/node/test_cast_INT4_to_FLOAT16/test_data_set_0/input_0.pb +1 -1
- onnx/backend/test/data/node/test_cast_INT4_to_INT8/test_data_set_0/input_0.pb +1 -1
- onnx/backend/test/data/node/test_conv_with_autopad_same/model.onnx +0 -0
- onnx/backend/test/data/node/test_conv_with_strides_and_asymmetric_padding/model.onnx +0 -0
- onnx/backend/test/data/node/test_conv_with_strides_no_padding/model.onnx +0 -0
- onnx/backend/test/data/node/test_conv_with_strides_padding/model.onnx +0 -0
- onnx/backend/test/data/node/test_convtranspose/model.onnx +0 -0
- onnx/backend/test/data/node/test_convtranspose_1d/model.onnx +0 -0
- onnx/backend/test/data/node/test_convtranspose_3d/model.onnx +0 -0
- onnx/backend/test/data/node/test_convtranspose_autopad_same/model.onnx +0 -0
- onnx/backend/test/data/node/test_convtranspose_dilations/model.onnx +0 -0
- onnx/backend/test/data/node/test_convtranspose_group_2/model.onnx +0 -0
- onnx/backend/test/data/node/test_convtranspose_group_2/test_data_set_0/input_0.pb +0 -0
- onnx/backend/test/data/node/test_convtranspose_group_2/test_data_set_0/input_1.pb +0 -0
- onnx/backend/test/data/node/test_convtranspose_group_2/test_data_set_0/output_0.pb +0 -0
- onnx/backend/test/data/node/test_convtranspose_group_2_image_3/model.onnx +0 -0
- onnx/backend/test/data/node/test_convtranspose_group_2_image_3/test_data_set_0/input_0.pb +0 -0
- onnx/backend/test/data/node/test_convtranspose_group_2_image_3/test_data_set_0/input_1.pb +0 -0
- onnx/backend/test/data/node/test_convtranspose_group_2_image_3/test_data_set_0/output_0.pb +0 -0
- onnx/backend/test/data/node/test_convtranspose_kernel_shape/model.onnx +0 -0
- onnx/backend/test/data/node/test_convtranspose_output_shape/model.onnx +0 -0
- onnx/backend/test/data/node/test_convtranspose_pad/model.onnx +0 -0
- onnx/backend/test/data/node/test_convtranspose_pads/model.onnx +0 -0
- onnx/backend/test/data/node/test_cos/model.onnx +0 -0
- onnx/backend/test/data/node/test_cos_example/model.onnx +0 -0
- onnx/backend/test/data/node/test_cosh/model.onnx +0 -0
- onnx/backend/test/data/node/test_cosh/test_data_set_0/output_0.pb +1 -1
- onnx/backend/test/data/node/test_cosh_example/model.onnx +0 -0
- onnx/backend/test/data/node/test_cosh_example/test_data_set_0/output_0.pb +0 -0
- onnx/backend/test/data/node/test_deform_conv_with_mask_bias/model.onnx +0 -0
- onnx/backend/test/data/node/test_deform_conv_with_multiple_offset_groups/model.onnx +0 -0
- onnx/backend/test/data/node/test_dequantizelinear_int4/test_data_set_0/input_0.pb +1 -1
- onnx/backend/test/data/node/test_det_2d/model.onnx +0 -0
- onnx/backend/test/data/node/test_det_nd/model.onnx +0 -0
- onnx/backend/test/data/node/test_dft/test_data_set_0/output_0.pb +0 -0
- onnx/backend/test/data/node/test_dft_axis/test_data_set_0/output_0.pb +0 -0
- onnx/backend/test/data/node/test_dft_axis_opset19/test_data_set_0/output_0.pb +0 -0
- onnx/backend/test/data/node/test_dft_inverse/test_data_set_0/output_0.pb +0 -0
- onnx/backend/test/data/node/test_dft_inverse_opset19/test_data_set_0/output_0.pb +0 -0
- onnx/backend/test/data/node/test_dft_opset19/test_data_set_0/output_0.pb +0 -0
- onnx/backend/test/data/node/test_dropout_default/model.onnx +0 -0
- onnx/backend/test/data/node/test_dropout_default_mask/model.onnx +0 -0
- onnx/backend/test/data/node/test_dropout_default_mask_ratio/model.onnx +0 -0
- onnx/backend/test/data/node/test_dropout_default_ratio/model.onnx +0 -0
- onnx/backend/test/data/node/test_elu/model.onnx +0 -0
- onnx/backend/test/data/node/test_elu_default/model.onnx +0 -0
- onnx/backend/test/data/node/test_elu_example/model.onnx +0 -0
- onnx/backend/test/data/node/test_eyelike_populate_off_main_diagonal/model.onnx +0 -0
- onnx/backend/test/data/node/test_eyelike_with_dtype/model.onnx +0 -0
- onnx/backend/test/data/node/test_eyelike_without_dtype/model.onnx +0 -0
- onnx/backend/test/data/node/test_gelu_default_1/test_data_set_0/output_0.pb +0 -0
- onnx/backend/test/data/node/test_gelu_default_1_expanded/test_data_set_0/output_0.pb +0 -0
- onnx/backend/test/data/node/test_gelu_default_2/test_data_set_0/output_0.pb +4 -3
- onnx/backend/test/data/node/test_gelu_default_2_expanded/test_data_set_0/output_0.pb +4 -3
- onnx/backend/test/data/node/test_gelu_tanh_2/test_data_set_0/output_0.pb +0 -0
- onnx/backend/test/data/node/test_gelu_tanh_2_expanded/test_data_set_0/output_0.pb +0 -0
- onnx/backend/test/data/node/test_globalaveragepool/model.onnx +0 -0
- onnx/backend/test/data/node/test_globalaveragepool_precomputed/model.onnx +0 -0
- onnx/backend/test/data/node/test_globalmaxpool/model.onnx +0 -0
- onnx/backend/test/data/node/test_globalmaxpool_precomputed/model.onnx +0 -0
- onnx/backend/test/data/node/test_gridsample/model.onnx +0 -0
- onnx/backend/test/data/node/test_gridsample_aligncorners_true/model.onnx +0 -0
- onnx/backend/test/data/node/test_gridsample_bicubic/model.onnx +0 -0
- onnx/backend/test/data/node/test_gridsample_bicubic_align_corners_0_additional_1/model.onnx +0 -0
- onnx/backend/test/data/node/test_gridsample_bicubic_align_corners_1_additional_1/model.onnx +0 -0
- onnx/backend/test/data/node/test_gridsample_bilinear/model.onnx +0 -0
- onnx/backend/test/data/node/test_gridsample_bilinear_align_corners_0_additional_1/model.onnx +0 -0
- onnx/backend/test/data/node/test_gridsample_bilinear_align_corners_1_additional_1/model.onnx +0 -0
- onnx/backend/test/data/node/test_gridsample_border_padding/model.onnx +0 -0
- onnx/backend/test/data/node/test_gridsample_nearest/model.onnx +0 -0
- onnx/backend/test/data/node/test_gridsample_nearest_align_corners_0_additional_1/model.onnx +0 -0
- onnx/backend/test/data/node/test_gridsample_nearest_align_corners_1_additional_1/model.onnx +0 -0
- onnx/backend/test/data/node/test_gridsample_reflection_padding/model.onnx +0 -0
- onnx/backend/test/data/node/test_gridsample_volumetric_bilinear_align_corners_0/model.onnx +0 -0
- onnx/backend/test/data/node/test_gridsample_volumetric_bilinear_align_corners_1/model.onnx +0 -0
- onnx/backend/test/data/node/test_gridsample_volumetric_nearest_align_corners_0/model.onnx +0 -0
- onnx/backend/test/data/node/test_gridsample_volumetric_nearest_align_corners_1/model.onnx +0 -0
- onnx/backend/test/data/node/test_gridsample_zeros_padding/model.onnx +0 -0
- onnx/backend/test/data/node/test_gru_batchwise/model.onnx +0 -0
- onnx/backend/test/data/node/test_gru_defaults/model.onnx +0 -0
- onnx/backend/test/data/node/test_gru_seq_length/model.onnx +0 -0
- onnx/backend/test/data/node/test_gru_with_initial_bias/model.onnx +0 -0
- onnx/backend/test/data/node/test_hammingwindow/test_data_set_0/output_0.pb +0 -0
- onnx/backend/test/data/node/test_hammingwindow_expanded/test_data_set_0/output_0.pb +0 -0
- onnx/backend/test/data/node/test_hammingwindow_symmetric/test_data_set_0/output_0.pb +1 -1
- onnx/backend/test/data/node/test_hammingwindow_symmetric_expanded/test_data_set_0/output_0.pb +1 -1
- onnx/backend/test/data/node/test_hannwindow/test_data_set_0/output_0.pb +0 -0
- onnx/backend/test/data/node/test_hannwindow_expanded/test_data_set_0/output_0.pb +0 -0
- onnx/backend/test/data/node/test_hannwindow_symmetric/test_data_set_0/output_0.pb +0 -0
- onnx/backend/test/data/node/test_hannwindow_symmetric_expanded/test_data_set_0/output_0.pb +0 -0
- onnx/backend/test/data/node/test_hardsigmoid/model.onnx +0 -0
- onnx/backend/test/data/node/test_hardsigmoid_default/model.onnx +0 -0
- onnx/backend/test/data/node/test_hardsigmoid_example/model.onnx +0 -0
- onnx/backend/test/data/node/test_hardswish/model.onnx +0 -0
- onnx/backend/test/data/node/test_hardswish_expanded/model.onnx +0 -0
- onnx/backend/test/data/node/test_image_decoder_decode_jpeg2k_rgb/test_data_set_0/input_0.pb +0 -0
- onnx/backend/test/data/node/test_instancenorm_epsilon/model.onnx +0 -0
- onnx/backend/test/data/node/test_instancenorm_example/model.onnx +0 -0
- onnx/backend/test/data/node/test_lppool_1d_default/model.onnx +0 -0
- onnx/backend/test/data/node/test_lppool_1d_default/test_data_set_0/output_0.pb +2 -2
- onnx/backend/test/data/node/test_lppool_2d_default/model.onnx +0 -0
- onnx/backend/test/data/node/test_lppool_2d_default/test_data_set_0/output_0.pb +0 -0
- onnx/backend/test/data/node/test_lppool_2d_dilations/model.onnx +0 -0
- onnx/backend/test/data/node/test_lppool_2d_pads/model.onnx +0 -0
- onnx/backend/test/data/node/test_lppool_2d_pads/test_data_set_0/output_0.pb +0 -0
- onnx/backend/test/data/node/test_lppool_2d_same_lower/model.onnx +0 -0
- onnx/backend/test/data/node/test_lppool_2d_same_lower/test_data_set_0/output_0.pb +0 -0
- onnx/backend/test/data/node/test_lppool_2d_same_upper/model.onnx +0 -0
- onnx/backend/test/data/node/test_lppool_2d_same_upper/test_data_set_0/output_0.pb +0 -0
- onnx/backend/test/data/node/test_lppool_2d_strides/model.onnx +0 -0
- onnx/backend/test/data/node/test_lppool_2d_strides/test_data_set_0/output_0.pb +0 -0
- onnx/backend/test/data/node/test_lppool_3d_default/model.onnx +0 -0
- onnx/backend/test/data/node/test_lppool_3d_default/test_data_set_0/output_0.pb +0 -0
- onnx/backend/test/data/node/test_lstm_batchwise/model.onnx +0 -0
- onnx/backend/test/data/node/test_lstm_defaults/model.onnx +0 -0
- onnx/backend/test/data/node/test_lstm_with_initial_bias/model.onnx +0 -0
- onnx/backend/test/data/node/test_lstm_with_peepholes/model.onnx +0 -0
- onnx/backend/test/data/node/test_maxpool_1d_default/model.onnx +0 -0
- onnx/backend/test/data/node/test_maxpool_2d_ceil/model.onnx +0 -0
- onnx/backend/test/data/node/test_maxpool_2d_ceil_output_size_reduce_by_one/model.onnx +0 -0
- onnx/backend/test/data/node/test_maxpool_2d_default/model.onnx +0 -0
- onnx/backend/test/data/node/test_maxpool_2d_dilations/model.onnx +0 -0
- onnx/backend/test/data/node/test_maxpool_2d_pads/model.onnx +0 -0
- onnx/backend/test/data/node/test_maxpool_2d_precomputed_pads/model.onnx +0 -0
- onnx/backend/test/data/node/test_maxpool_2d_precomputed_same_upper/model.onnx +0 -0
- onnx/backend/test/data/node/test_maxpool_2d_precomputed_strides/model.onnx +0 -0
- onnx/backend/test/data/node/test_maxpool_2d_same_lower/model.onnx +0 -0
- onnx/backend/test/data/node/test_maxpool_2d_same_upper/model.onnx +0 -0
- onnx/backend/test/data/node/test_maxpool_2d_strides/model.onnx +0 -0
- onnx/backend/test/data/node/test_maxpool_2d_uint8/model.onnx +0 -0
- onnx/backend/test/data/node/test_maxpool_3d_default/model.onnx +0 -0
- onnx/backend/test/data/node/test_maxpool_3d_dilations/model.onnx +0 -0
- onnx/backend/test/data/node/test_maxpool_3d_dilations_use_ref_impl/model.onnx +0 -0
- onnx/backend/test/data/node/test_maxpool_3d_dilations_use_ref_impl_large/model.onnx +0 -0
- onnx/backend/test/data/node/test_maxpool_with_argmax_2d_precomputed_pads/model.onnx +0 -0
- onnx/backend/test/data/node/test_maxpool_with_argmax_2d_precomputed_strides/model.onnx +0 -0
- onnx/backend/test/data/node/test_maxunpool_export_with_output_shape/model.onnx +0 -0
- onnx/backend/test/data/node/test_maxunpool_export_without_output_shape/model.onnx +0 -0
- onnx/backend/test/data/node/test_mish/model.onnx +0 -0
- onnx/backend/test/data/node/test_mish/test_data_set_0/output_0.pb +0 -0
- onnx/backend/test/data/node/test_mish_expanded/model.onnx +0 -0
- onnx/backend/test/data/node/test_mish_expanded/test_data_set_0/output_0.pb +0 -0
- onnx/backend/test/data/node/test_nllloss_NC/model.onnx +0 -0
- onnx/backend/test/data/node/test_nllloss_NC_expanded/model.onnx +0 -0
- onnx/backend/test/data/node/test_nllloss_NCd1/model.onnx +0 -0
- onnx/backend/test/data/node/test_nllloss_NCd1_expanded/model.onnx +0 -0
- onnx/backend/test/data/node/test_nllloss_NCd1_ii/model.onnx +0 -0
- onnx/backend/test/data/node/test_nllloss_NCd1_ii_expanded/model.onnx +0 -0
- onnx/backend/test/data/node/test_nllloss_NCd1_mean_weight_negative_ii/model.onnx +0 -0
- onnx/backend/test/data/node/test_nllloss_NCd1_mean_weight_negative_ii_expanded/model.onnx +0 -0
- onnx/backend/test/data/node/test_nllloss_NCd1_weight/model.onnx +0 -0
- onnx/backend/test/data/node/test_nllloss_NCd1_weight_expanded/model.onnx +0 -0
- onnx/backend/test/data/node/test_nllloss_NCd1_weight_ii/model.onnx +0 -0
- onnx/backend/test/data/node/test_nllloss_NCd1_weight_ii_expanded/model.onnx +0 -0
- onnx/backend/test/data/node/test_nllloss_NCd1d2/model.onnx +0 -0
- onnx/backend/test/data/node/test_nllloss_NCd1d2_expanded/model.onnx +0 -0
- onnx/backend/test/data/node/test_nllloss_NCd1d2_no_weight_reduction_mean_ii/model.onnx +0 -0
- onnx/backend/test/data/node/test_nllloss_NCd1d2_no_weight_reduction_mean_ii_expanded/model.onnx +0 -0
- onnx/backend/test/data/node/test_nllloss_NCd1d2_reduction_mean/model.onnx +0 -0
- onnx/backend/test/data/node/test_nllloss_NCd1d2_reduction_mean_expanded/model.onnx +0 -0
- onnx/backend/test/data/node/test_nllloss_NCd1d2_reduction_sum/model.onnx +0 -0
- onnx/backend/test/data/node/test_nllloss_NCd1d2_reduction_sum_expanded/model.onnx +0 -0
- onnx/backend/test/data/node/test_nllloss_NCd1d2_with_weight/model.onnx +0 -0
- onnx/backend/test/data/node/test_nllloss_NCd1d2_with_weight_expanded/model.onnx +0 -0
- onnx/backend/test/data/node/test_nllloss_NCd1d2_with_weight_reduction_mean/model.onnx +0 -0
- onnx/backend/test/data/node/test_nllloss_NCd1d2_with_weight_reduction_mean_expanded/model.onnx +0 -0
- onnx/backend/test/data/node/test_nllloss_NCd1d2_with_weight_reduction_sum/model.onnx +0 -0
- onnx/backend/test/data/node/test_nllloss_NCd1d2_with_weight_reduction_sum_expanded/model.onnx +0 -0
- onnx/backend/test/data/node/test_nllloss_NCd1d2_with_weight_reduction_sum_ii/model.onnx +0 -0
- onnx/backend/test/data/node/test_nllloss_NCd1d2_with_weight_reduction_sum_ii_expanded/model.onnx +0 -0
- onnx/backend/test/data/node/test_nllloss_NCd1d2d3_none_no_weight_negative_ii/model.onnx +0 -0
- onnx/backend/test/data/node/test_nllloss_NCd1d2d3_none_no_weight_negative_ii_expanded/model.onnx +0 -0
- onnx/backend/test/data/node/test_nllloss_NCd1d2d3_sum_weight_high_ii/model.onnx +0 -0
- onnx/backend/test/data/node/test_nllloss_NCd1d2d3_sum_weight_high_ii_expanded/model.onnx +0 -0
- onnx/backend/test/data/node/test_nllloss_NCd1d2d3d4d5_mean_weight/model.onnx +0 -0
- onnx/backend/test/data/node/test_nllloss_NCd1d2d3d4d5_mean_weight_expanded/model.onnx +0 -0
- onnx/backend/test/data/node/test_nllloss_NCd1d2d3d4d5_none_no_weight/model.onnx +0 -0
- onnx/backend/test/data/node/test_nllloss_NCd1d2d3d4d5_none_no_weight_expanded/model.onnx +0 -0
- onnx/backend/test/data/node/test_quantizelinear_int4/test_data_set_0/output_0.pb +1 -1
- onnx/backend/test/data/node/test_reduce_log_sum_exp_do_not_keepdims_random/test_data_set_0/output_0.pb +1 -1
- onnx/backend/test/data/node/test_reduce_log_sum_exp_do_not_keepdims_random_expanded/test_data_set_0/output_0.pb +1 -1
- onnx/backend/test/data/node/test_reduce_log_sum_exp_keepdims_random/test_data_set_0/output_0.pb +1 -1
- onnx/backend/test/data/node/test_reduce_log_sum_exp_keepdims_random_expanded/test_data_set_0/output_0.pb +1 -1
- onnx/backend/test/data/node/test_reduce_log_sum_exp_negative_axes_keepdims_random/test_data_set_0/output_0.pb +1 -1
- onnx/backend/test/data/node/test_reduce_log_sum_exp_negative_axes_keepdims_random_expanded/test_data_set_0/output_0.pb +1 -1
- onnx/backend/test/data/node/test_reduce_max_empty_set/model.onnx +0 -0
- onnx/backend/test/data/node/test_reduce_max_empty_set/test_data_set_0/input_0.pb +0 -0
- onnx/backend/test/data/node/test_reduce_max_empty_set/test_data_set_0/input_1.pb +0 -0
- onnx/backend/test/data/node/test_reduce_max_empty_set/test_data_set_0/output_0.pb +0 -0
- onnx/backend/test/data/node/test_reduce_sum_empty_axes_input_noop/model.onnx +0 -0
- onnx/backend/test/data/node/test_reduce_sum_empty_axes_input_noop/test_data_set_0/input_0.pb +1 -0
- onnx/backend/test/data/node/test_reduce_sum_empty_axes_input_noop/test_data_set_0/input_1.pb +0 -0
- onnx/backend/test/data/node/test_reduce_sum_empty_axes_input_noop/test_data_set_0/output_0.pb +1 -0
- onnx/backend/test/data/node/test_reduce_sum_negative_axes_keepdims_random/model.onnx +0 -0
- onnx/backend/test/data/node/test_reduce_sum_negative_axes_keepdims_random/test_data_set_0/input_1.pb +0 -0
- onnx/backend/test/data/node/test_reduce_sum_negative_axes_keepdims_random/test_data_set_0/output_0.pb +1 -1
- onnx/backend/test/data/node/test_resize_tf_crop_and_resize/model.onnx +0 -0
- onnx/backend/test/data/node/test_resize_tf_crop_and_resize/test_data_set_0/input_1.pb +0 -0
- onnx/backend/test/data/node/test_resize_tf_crop_and_resize/test_data_set_0/output_0.pb +0 -0
- onnx/backend/test/data/node/test_resize_tf_crop_and_resize_extrapolation_value/model.onnx +0 -0
- onnx/backend/test/data/node/test_resize_tf_crop_and_resize_extrapolation_value/test_data_set_0/input_0.pb +0 -0
- onnx/backend/test/data/node/test_resize_tf_crop_and_resize_extrapolation_value/test_data_set_0/input_1.pb +0 -0
- onnx/backend/test/data/node/test_resize_tf_crop_and_resize_extrapolation_value/test_data_set_0/input_2.pb +0 -0
- onnx/backend/test/data/node/test_resize_tf_crop_and_resize_extrapolation_value/test_data_set_0/output_0.pb +0 -0
- onnx/backend/test/data/node/test_resize_upsample_sizes_nearest_not_larger/model.onnx +0 -0
- onnx/backend/test/data/node/test_resize_upsample_sizes_nearest_not_larger/test_data_set_0/output_0.pb +0 -0
- onnx/backend/test/data/node/test_resize_upsample_sizes_nearest_not_smaller/model.onnx +0 -0
- onnx/backend/test/data/node/test_resize_upsample_sizes_nearest_not_smaller/test_data_set_0/input_0.pb +0 -0
- onnx/backend/test/data/node/test_resize_upsample_sizes_nearest_not_smaller/test_data_set_0/input_1.pb +0 -0
- onnx/backend/test/data/node/test_resize_upsample_sizes_nearest_not_smaller/test_data_set_0/output_0.pb +0 -0
- onnx/backend/test/data/node/test_rnn_seq_length/model.onnx +0 -0
- onnx/backend/test/data/node/test_roialign_aligned_false/model.onnx +0 -0
- onnx/backend/test/data/node/test_roialign_aligned_true/model.onnx +0 -0
- onnx/backend/test/data/node/test_roialign_mode_max/model.onnx +0 -0
- onnx/backend/test/data/node/test_round/model.onnx +0 -0
- onnx/backend/test/data/node/test_selu/model.onnx +0 -0
- onnx/backend/test/data/node/test_selu_default/model.onnx +0 -0
- onnx/backend/test/data/node/test_selu_example/model.onnx +0 -0
- onnx/backend/test/data/node/test_simple_rnn_batchwise/model.onnx +0 -0
- onnx/backend/test/data/node/test_simple_rnn_defaults/model.onnx +0 -0
- onnx/backend/test/data/node/test_simple_rnn_with_initial_bias/model.onnx +0 -0
- onnx/backend/test/data/node/test_sin/model.onnx +0 -0
- onnx/backend/test/data/node/test_sin_example/model.onnx +0 -0
- onnx/backend/test/data/node/test_sinh/model.onnx +0 -0
- onnx/backend/test/data/node/test_sinh/test_data_set_0/output_0.pb +1 -1
- onnx/backend/test/data/node/test_sinh_example/model.onnx +0 -0
- onnx/backend/test/data/node/test_softplus/model.onnx +0 -0
- onnx/backend/test/data/node/test_softplus_example/model.onnx +0 -0
- onnx/backend/test/data/node/test_softsign/model.onnx +0 -0
- onnx/backend/test/data/node/test_softsign_example/model.onnx +0 -0
- onnx/backend/test/data/node/test_stft_with_window/test_data_set_0/input_2.pb +0 -0
- onnx/backend/test/data/node/test_stft_with_window/test_data_set_0/output_0.pb +0 -0
- onnx/backend/test/data/node/test_tan/model.onnx +0 -0
- onnx/backend/test/data/node/test_tan/test_data_set_0/output_0.pb +1 -1
- onnx/backend/test/data/node/test_tan_example/model.onnx +0 -0
- onnx/backend/test/data/node/test_thresholdedrelu/model.onnx +0 -0
- onnx/backend/test/data/node/test_thresholdedrelu_default/model.onnx +0 -0
- onnx/backend/test/data/node/test_thresholdedrelu_example/model.onnx +0 -0
- onnx/backend/test/data/node/test_training_dropout/model.onnx +0 -0
- onnx/backend/test/data/node/test_training_dropout_default/model.onnx +0 -0
- onnx/backend/test/data/node/test_training_dropout_default_mask/model.onnx +0 -0
- onnx/backend/test/data/node/test_training_dropout_mask/model.onnx +0 -0
- onnx/backend/test/data/node/test_training_dropout_zero_ratio/model.onnx +0 -0
- onnx/backend/test/data/node/test_training_dropout_zero_ratio_mask/model.onnx +0 -0
- onnx/backend/test/loader/__init__.py +11 -6
- onnx/backend/test/report/__init__.py +4 -3
- onnx/backend/test/report/base.py +1 -0
- onnx/backend/test/report/coverage.py +21 -20
- onnx/backend/test/runner/__init__.py +13 -11
- onnx/backend/test/runner/item.py +3 -2
- onnx/backend/test/stat_coverage.py +6 -5
- onnx/bin/checker.py +1 -0
- onnx/checker.cc +6 -1
- onnx/common/version.h +1 -1
- onnx/compose.py +66 -50
- onnx/cpp2py_export.cc +4 -0
- onnx/defs/__init__.py +2 -2
- onnx/defs/data_type_utils.cc +0 -1
- onnx/defs/gen_doc.py +9 -8
- onnx/defs/gen_shape_inference_information.py +1 -0
- onnx/defs/generator/defs.cc +32 -84
- onnx/defs/generator/old.cc +389 -0
- onnx/defs/math/defs.cc +308 -313
- onnx/defs/math/old.cc +996 -9
- onnx/defs/math/utils.cc +12 -1
- onnx/defs/math/utils.h +2 -0
- onnx/defs/nn/defs.cc +57 -75
- onnx/defs/nn/old.cc +1536 -2
- onnx/defs/object_detection/defs.cc +4 -7
- onnx/defs/object_detection/old.cc +117 -0
- onnx/defs/operator_sets.h +108 -1
- onnx/defs/parser.cc +10 -1
- onnx/defs/quantization/defs.cc +3 -2
- onnx/defs/quantization/old.cc +4 -1
- onnx/defs/rnn/defs.cc +10 -13
- onnx/defs/rnn/old.cc +517 -2
- onnx/defs/schema.cc +53 -59
- onnx/defs/schema.h +58 -2
- onnx/defs/shape_inference.h +67 -18
- onnx/defs/tensor/defs.cc +22 -20
- onnx/defs/tensor/old.cc +114 -3
- onnx/external_data_helper.py +27 -14
- onnx/gen_proto.py +3 -2
- onnx/helper.py +86 -61
- onnx/hub.py +39 -35
- onnx/inliner/inliner.cc +0 -1
- onnx/mapping.py +3 -2
- onnx/numpy_helper.py +159 -23
- onnx/onnx-ml.proto +1 -1
- onnx/onnx.in.proto +1 -1
- onnx/onnx.proto +1 -1
- onnx/onnx_cpp2py_export/defs.pyi +0 -2
- onnx/onnx_cpp2py_export/inliner.pyi +0 -4
- onnx/onnx_cpp2py_export/parser.pyi +0 -4
- onnx/onnx_cpp2py_export.cp312-win32.pyd +0 -0
- onnx/parser.py +1 -0
- onnx/printer.py +2 -3
- onnx/reference/__init__.py +1 -0
- onnx/reference/custom_element_types.py +73 -8
- onnx/reference/op_run.py +13 -58
- onnx/reference/ops/__init__.py +1 -0
- onnx/reference/ops/_helpers.py +6 -4
- onnx/reference/ops/_op.py +16 -5
- onnx/reference/ops/_op_common_indices.py +1 -1
- onnx/reference/ops/_op_common_pool.py +38 -29
- onnx/reference/ops/_op_common_random.py +1 -1
- onnx/reference/ops/_op_common_window.py +2 -2
- onnx/reference/ops/_op_list.py +9 -6
- onnx/reference/ops/aionnx_preview_training/__init__.py +1 -0
- onnx/reference/ops/aionnx_preview_training/_op_list.py +5 -7
- onnx/reference/ops/aionnx_preview_training/_op_run_training.py +1 -1
- onnx/reference/ops/aionnx_preview_training/op_adagrad.py +14 -5
- onnx/reference/ops/aionnx_preview_training/op_adam.py +2 -2
- onnx/reference/ops/aionnx_preview_training/op_momentum.py +14 -2
- onnx/reference/ops/aionnxml/__init__.py +1 -0
- onnx/reference/ops/aionnxml/_common_classifier.py +1 -0
- onnx/reference/ops/aionnxml/_op_list.py +5 -6
- onnx/reference/ops/aionnxml/_op_run_aionnxml.py +1 -1
- onnx/reference/ops/aionnxml/op_array_feature_extractor.py +1 -1
- onnx/reference/ops/aionnxml/op_binarizer.py +1 -1
- onnx/reference/ops/aionnxml/op_dict_vectorizer.py +2 -2
- onnx/reference/ops/aionnxml/op_feature_vectorizer.py +1 -1
- onnx/reference/ops/aionnxml/op_imputer.py +3 -3
- onnx/reference/ops/aionnxml/op_label_encoder.py +1 -1
- onnx/reference/ops/aionnxml/op_linear_classifier.py +2 -2
- onnx/reference/ops/aionnxml/op_linear_regressor.py +1 -1
- onnx/reference/ops/aionnxml/op_normalizer.py +1 -1
- onnx/reference/ops/aionnxml/op_one_hot_encoder.py +1 -1
- onnx/reference/ops/aionnxml/op_scaler.py +1 -1
- onnx/reference/ops/aionnxml/op_svm_classifier.py +10 -7
- onnx/reference/ops/aionnxml/op_svm_helper.py +2 -2
- onnx/reference/ops/aionnxml/op_svm_regressor.py +1 -1
- onnx/reference/ops/aionnxml/op_tree_ensemble.py +3 -3
- onnx/reference/ops/aionnxml/op_tree_ensemble_classifier.py +1 -1
- onnx/reference/ops/aionnxml/op_tree_ensemble_helper.py +2 -2
- onnx/reference/ops/aionnxml/op_tree_ensemble_regressor.py +5 -3
- onnx/reference/ops/experimental/__init__.py +1 -0
- onnx/reference/ops/experimental/_op_list.py +6 -12
- onnx/reference/ops/experimental/_op_run_experimental.py +1 -1
- onnx/reference/ops/experimental/op_im2col.py +1 -1
- onnx/reference/ops/op_abs.py +1 -1
- onnx/reference/ops/op_acos.py +1 -1
- onnx/reference/ops/op_acosh.py +1 -1
- onnx/reference/ops/op_add.py +1 -1
- onnx/reference/ops/op_affine_grid.py +1 -1
- onnx/reference/ops/op_and.py +1 -1
- onnx/reference/ops/op_argmax.py +1 -1
- onnx/reference/ops/op_argmin.py +1 -1
- onnx/reference/ops/op_asin.py +1 -1
- onnx/reference/ops/op_asinh.py +1 -1
- onnx/reference/ops/op_atan.py +1 -1
- onnx/reference/ops/op_atanh.py +1 -1
- onnx/reference/ops/op_attribute_has_value.py +15 -15
- onnx/reference/ops/op_average_pool.py +1 -1
- onnx/reference/ops/op_batch_normalization.py +13 -2
- onnx/reference/ops/op_bernoulli.py +1 -1
- onnx/reference/ops/op_bitshift.py +1 -1
- onnx/reference/ops/op_bitwise_and.py +1 -1
- onnx/reference/ops/op_bitwise_not.py +1 -1
- onnx/reference/ops/op_bitwise_or.py +1 -1
- onnx/reference/ops/op_bitwise_xor.py +1 -1
- onnx/reference/ops/op_blackman_window.py +1 -1
- onnx/reference/ops/op_cast.py +11 -10
- onnx/reference/ops/op_cast_like.py +1 -1
- onnx/reference/ops/op_ceil.py +1 -1
- onnx/reference/ops/op_celu.py +1 -1
- onnx/reference/ops/op_center_crop_pad.py +1 -1
- onnx/reference/ops/op_clip.py +1 -1
- onnx/reference/ops/op_col2im.py +10 -4
- onnx/reference/ops/op_compress.py +1 -1
- onnx/reference/ops/op_concat.py +1 -1
- onnx/reference/ops/op_concat_from_sequence.py +3 -3
- onnx/reference/ops/op_constant.py +2 -2
- onnx/reference/ops/op_constant_of_shape.py +1 -1
- onnx/reference/ops/op_conv.py +22 -17
- onnx/reference/ops/op_conv_integer.py +1 -1
- onnx/reference/ops/op_conv_transpose.py +37 -6
- onnx/reference/ops/op_cos.py +1 -1
- onnx/reference/ops/op_cosh.py +1 -1
- onnx/reference/ops/op_cum_sum.py +1 -1
- onnx/reference/ops/op_deform_conv.py +1 -1
- onnx/reference/ops/op_depth_to_space.py +1 -1
- onnx/reference/ops/op_dequantize_linear.py +7 -9
- onnx/reference/ops/op_det.py +1 -1
- onnx/reference/ops/op_dft.py +16 -2
- onnx/reference/ops/op_div.py +1 -1
- onnx/reference/ops/op_dropout.py +9 -8
- onnx/reference/ops/op_dynamic_quantize_linear.py +1 -1
- onnx/reference/ops/op_einsum.py +1 -1
- onnx/reference/ops/op_elu.py +1 -1
- onnx/reference/ops/op_equal.py +1 -1
- onnx/reference/ops/op_erf.py +1 -1
- onnx/reference/ops/op_exp.py +1 -1
- onnx/reference/ops/op_expand.py +1 -1
- onnx/reference/ops/op_eyelike.py +2 -2
- onnx/reference/ops/op_flatten.py +1 -1
- onnx/reference/ops/op_floor.py +1 -1
- onnx/reference/ops/op_gather.py +1 -1
- onnx/reference/ops/op_gather_elements.py +3 -3
- onnx/reference/ops/op_gathernd.py +2 -4
- onnx/reference/ops/op_gemm.py +12 -2
- onnx/reference/ops/op_global_average_pool.py +1 -1
- onnx/reference/ops/op_global_max_pool.py +1 -1
- onnx/reference/ops/op_greater.py +1 -1
- onnx/reference/ops/op_greater_or_equal.py +1 -1
- onnx/reference/ops/op_grid_sample.py +2 -3
- onnx/reference/ops/op_gru.py +7 -7
- onnx/reference/ops/op_hamming_window.py +1 -1
- onnx/reference/ops/op_hann_window.py +1 -1
- onnx/reference/ops/op_hard_sigmoid.py +1 -1
- onnx/reference/ops/op_hardmax.py +5 -2
- onnx/reference/ops/op_identity.py +3 -3
- onnx/reference/ops/op_if.py +2 -2
- onnx/reference/ops/op_instance_normalization.py +1 -1
- onnx/reference/ops/op_isinf.py +1 -1
- onnx/reference/ops/op_isnan.py +1 -1
- onnx/reference/ops/op_layer_normalization.py +2 -4
- onnx/reference/ops/op_leaky_relu.py +1 -1
- onnx/reference/ops/op_less.py +1 -1
- onnx/reference/ops/op_less_or_equal.py +1 -1
- onnx/reference/ops/op_log.py +1 -1
- onnx/reference/ops/op_log_softmax.py +1 -1
- onnx/reference/ops/op_loop.py +4 -2
- onnx/reference/ops/op_lp_normalization.py +1 -1
- onnx/reference/ops/op_lp_pool.py +4 -2
- onnx/reference/ops/op_lrn.py +1 -1
- onnx/reference/ops/op_lstm.py +9 -11
- onnx/reference/ops/op_matmul.py +1 -1
- onnx/reference/ops/op_matmul_integer.py +1 -1
- onnx/reference/ops/op_max.py +1 -1
- onnx/reference/ops/op_max_pool.py +8 -8
- onnx/reference/ops/op_max_unpool.py +5 -3
- onnx/reference/ops/op_mean.py +1 -1
- onnx/reference/ops/op_mel_weight_matrix.py +1 -1
- onnx/reference/ops/op_min.py +1 -1
- onnx/reference/ops/op_mod.py +1 -1
- onnx/reference/ops/op_mul.py +1 -1
- onnx/reference/ops/op_neg.py +1 -1
- onnx/reference/ops/op_negative_log_likelihood_loss.py +4 -2
- onnx/reference/ops/op_non_max_suppression.py +10 -11
- onnx/reference/ops/op_non_zero.py +1 -1
- onnx/reference/ops/op_not.py +1 -1
- onnx/reference/ops/op_one_hot.py +1 -1
- onnx/reference/ops/op_optional.py +1 -1
- onnx/reference/ops/op_optional_get_element.py +1 -1
- onnx/reference/ops/op_optional_has_element.py +1 -1
- onnx/reference/ops/op_or.py +1 -1
- onnx/reference/ops/op_pad.py +1 -1
- onnx/reference/ops/op_pool_common.py +7 -6
- onnx/reference/ops/op_pow.py +1 -1
- onnx/reference/ops/op_prelu.py +3 -3
- onnx/reference/ops/op_qlinear_conv.py +1 -1
- onnx/reference/ops/op_qlinear_matmul.py +1 -1
- onnx/reference/ops/op_quantize_linear.py +15 -9
- onnx/reference/ops/op_random_normal.py +1 -1
- onnx/reference/ops/op_random_normal_like.py +1 -1
- onnx/reference/ops/op_random_uniform.py +1 -1
- onnx/reference/ops/op_random_uniform_like.py +1 -1
- onnx/reference/ops/op_range.py +1 -1
- onnx/reference/ops/op_reciprocal.py +1 -1
- onnx/reference/ops/op_reduce_l1.py +1 -1
- onnx/reference/ops/op_reduce_l2.py +1 -1
- onnx/reference/ops/op_reduce_log_sum.py +1 -1
- onnx/reference/ops/op_reduce_log_sum_exp.py +1 -1
- onnx/reference/ops/op_reduce_max.py +1 -1
- onnx/reference/ops/op_reduce_mean.py +2 -2
- onnx/reference/ops/op_reduce_min.py +1 -1
- onnx/reference/ops/op_reduce_prod.py +1 -1
- onnx/reference/ops/op_reduce_sum.py +2 -2
- onnx/reference/ops/op_reduce_sum_square.py +1 -1
- onnx/reference/ops/op_regex_full_match.py +1 -1
- onnx/reference/ops/op_relu.py +1 -1
- onnx/reference/ops/op_reshape.py +1 -1
- onnx/reference/ops/op_reverse_sequence.py +1 -1
- onnx/reference/ops/op_rnn.py +10 -8
- onnx/reference/ops/op_roi_align.py +5 -5
- onnx/reference/ops/op_round.py +1 -1
- onnx/reference/ops/op_scan.py +8 -8
- onnx/reference/ops/op_scatter_elements.py +19 -50
- onnx/reference/ops/op_scatternd.py +1 -1
- onnx/reference/ops/op_selu.py +1 -1
- onnx/reference/ops/op_sequence_at.py +1 -1
- onnx/reference/ops/op_sequence_construct.py +1 -1
- onnx/reference/ops/op_sequence_empty.py +2 -2
- onnx/reference/ops/op_sequence_erase.py +1 -1
- onnx/reference/ops/op_sequence_insert.py +6 -6
- onnx/reference/ops/op_sequence_length.py +1 -1
- onnx/reference/ops/op_sequence_map.py +1 -1
- onnx/reference/ops/op_shape.py +2 -6
- onnx/reference/ops/op_shrink.py +1 -1
- onnx/reference/ops/op_sigmoid.py +1 -1
- onnx/reference/ops/op_sign.py +1 -1
- onnx/reference/ops/op_sin.py +1 -1
- onnx/reference/ops/op_sinh.py +1 -1
- onnx/reference/ops/op_size.py +1 -1
- onnx/reference/ops/op_slice.py +3 -5
- onnx/reference/ops/op_softmax.py +1 -1
- onnx/reference/ops/op_softmax_cross_entropy_loss.py +1 -1
- onnx/reference/ops/op_softplus.py +1 -1
- onnx/reference/ops/op_softsign.py +1 -1
- onnx/reference/ops/op_space_to_depth.py +1 -1
- onnx/reference/ops/op_split.py +1 -1
- onnx/reference/ops/op_split_to_sequence.py +5 -7
- onnx/reference/ops/op_sqrt.py +1 -1
- onnx/reference/ops/op_squeeze.py +1 -1
- onnx/reference/ops/op_stft.py +3 -2
- onnx/reference/ops/op_string_concat.py +1 -1
- onnx/reference/ops/op_string_normalizer.py +8 -8
- onnx/reference/ops/op_string_split.py +2 -4
- onnx/reference/ops/op_sub.py +1 -1
- onnx/reference/ops/op_sum.py +1 -1
- onnx/reference/ops/op_tan.py +1 -1
- onnx/reference/ops/op_tanh.py +1 -1
- onnx/reference/ops/op_tfidf_vectorizer.py +11 -12
- onnx/reference/ops/op_thresholded_relu.py +1 -1
- onnx/reference/ops/op_tile.py +1 -1
- onnx/reference/ops/op_topk.py +7 -2
- onnx/reference/ops/op_transpose.py +1 -1
- onnx/reference/ops/op_trilu.py +1 -1
- onnx/reference/ops/op_unique.py +3 -1
- onnx/reference/ops/op_unsqueeze.py +2 -2
- onnx/reference/ops/op_upsample.py +1 -1
- onnx/reference/ops/op_where.py +1 -1
- onnx/reference/ops/op_xor.py +1 -1
- onnx/reference/ops_optimized/__init__.py +1 -0
- onnx/reference/ops_optimized/op_conv_optimized.py +1 -1
- onnx/reference/reference_evaluator.py +27 -13
- onnx/serialization.py +1 -1
- onnx/shape_inference/implementation.cc +15 -1
- onnx/shape_inference/implementation.h +15 -1
- onnx/shape_inference.py +1 -1
- onnx/subbyte.py +6 -6
- onnx/test/basic_test.py +1 -0
- onnx/test/checker_test.py +37 -2
- onnx/test/compose_test.py +12 -11
- onnx/test/cpp/schema_registration_test.cc +3 -3
- onnx/test/cpp/shape_inference_test.cc +38 -2
- onnx/test/elu_test.py +2 -0
- onnx/test/function_inference_test.py +2 -0
- onnx/test/function_test.py +1 -0
- onnx/test/helper_test.py +77 -16
- onnx/test/hub_test.py +1 -1
- onnx/test/inference_function_test.py +25 -8
- onnx/test/inliner_test.py +2 -0
- onnx/test/model_container_refeval_test.py +2 -1
- onnx/test/model_container_test.py +1 -0
- onnx/test/model_inference_test.py +2 -0
- onnx/test/numpy_helper_test.py +56 -1
- onnx/test/parser_test.py +48 -2
- onnx/test/printer_test.py +2 -0
- onnx/test/reference_evaluator_ml_test.py +2 -3
- onnx/test/reference_evaluator_model_test.py +2 -0
- onnx/test/reference_evaluator_test.py +173 -19
- onnx/test/relu_test.py +2 -0
- onnx/test/schema_test.py +4 -2
- onnx/test/serialization_test.py +2 -0
- onnx/test/shape_inference_test.py +349 -19
- onnx/test/symbolic_shape_test.py +3 -3
- onnx/test/test_backend_onnxruntime.py +272 -1
- onnx/test/test_backend_reference.py +24 -3
- onnx/test/test_backend_test.py +6 -5
- onnx/test/test_external_data.py +91 -2
- onnx/test/test_with_ort.py +1 -0
- onnx/test/tools_test.py +15 -14
- onnx/test/training_tool_test.py +1 -0
- onnx/test/utils_test.py +1 -0
- onnx/test/version_converter/automatic_downgrade_test.py +2 -0
- onnx/test/version_converter/automatic_upgrade_test.py +2 -0
- onnx/test/version_converter_test.py +26 -7
- onnx/test/version_utils.py +8 -0
- onnx/tools/net_drawer.py +7 -6
- onnx/tools/replace_constants.py +11 -11
- onnx/tools/update_model_dims.py +7 -6
- onnx/utils.py +104 -21
- onnx/version.py +2 -2
- onnx/version_converter/adapters/split_17_18.h +1 -1
- onnx/version_converter/convert.h +107 -2
- onnx/version_converter.py +3 -2
- {onnx-1.16.1.dist-info → onnx-1.17.0.dist-info}/METADATA +8 -11
- {onnx-1.16.1.dist-info → onnx-1.17.0.dist-info}/RECORD +843 -817
- {onnx-1.16.1.dist-info → onnx-1.17.0.dist-info}/WHEEL +1 -1
- {onnx-1.16.1.dist-info → onnx-1.17.0.dist-info}/LICENSE +0 -0
- {onnx-1.16.1.dist-info → onnx-1.17.0.dist-info}/entry_points.txt +0 -0
- {onnx-1.16.1.dist-info → onnx-1.17.0.dist-info}/top_level.txt +0 -0
onnx/defs/rnn/old.cc
CHANGED
|
@@ -5,6 +5,521 @@
|
|
|
5
5
|
#include "onnx/defs/schema.h"
|
|
6
6
|
|
|
7
7
|
namespace ONNX_NAMESPACE {
|
|
8
|
+
|
|
9
|
+
void RNNShapeInference_opset14(InferenceContext& ctx) {
|
|
10
|
+
TensorShapeProto::Dimension num_directions, seq_length, batch_size, hidden_size;
|
|
11
|
+
|
|
12
|
+
auto direction = getAttribute(ctx, "direction", "forward");
|
|
13
|
+
if ((direction == "forward") || (direction == "reverse"))
|
|
14
|
+
num_directions.set_dim_value(1);
|
|
15
|
+
else if (direction == "bidirectional")
|
|
16
|
+
num_directions.set_dim_value(2);
|
|
17
|
+
// else leave num_directions unknown in case of incorrect attribute value
|
|
18
|
+
|
|
19
|
+
auto hidden_size_value = getAttribute(ctx, "hidden_size", -1);
|
|
20
|
+
if (hidden_size_value > 0)
|
|
21
|
+
hidden_size.set_dim_value(hidden_size_value);
|
|
22
|
+
|
|
23
|
+
auto layout_value = getAttribute(ctx, "layout", 0);
|
|
24
|
+
|
|
25
|
+
if (hasInputShape(ctx, 0)) {
|
|
26
|
+
auto& first_input_shape = getInputShape(ctx, 0);
|
|
27
|
+
if (first_input_shape.dim_size() != 3) {
|
|
28
|
+
fail_shape_inference("First input tensor must have rank 3");
|
|
29
|
+
}
|
|
30
|
+
seq_length = first_input_shape.dim((layout_value == 0) ? 0 : 1);
|
|
31
|
+
batch_size = first_input_shape.dim((layout_value == 0) ? 1 : 0);
|
|
32
|
+
}
|
|
33
|
+
|
|
34
|
+
auto num_outputs = ctx.getNumOutputs();
|
|
35
|
+
|
|
36
|
+
if (num_outputs > 0) {
|
|
37
|
+
// Y
|
|
38
|
+
propagateElemTypeFromInputToOutput(ctx, 0, 0);
|
|
39
|
+
|
|
40
|
+
if (layout_value == 0) {
|
|
41
|
+
auto dims = {seq_length, num_directions, batch_size, hidden_size};
|
|
42
|
+
updateOutputShape(ctx, 0, dims);
|
|
43
|
+
} else {
|
|
44
|
+
auto dims = {batch_size, seq_length, num_directions, hidden_size};
|
|
45
|
+
updateOutputShape(ctx, 0, dims);
|
|
46
|
+
}
|
|
47
|
+
}
|
|
48
|
+
|
|
49
|
+
if (num_outputs > 1) {
|
|
50
|
+
// Y_h
|
|
51
|
+
propagateElemTypeFromInputToOutput(ctx, 0, 1);
|
|
52
|
+
|
|
53
|
+
if (layout_value == 0) {
|
|
54
|
+
auto dims = {num_directions, batch_size, hidden_size};
|
|
55
|
+
updateOutputShape(ctx, 1, dims);
|
|
56
|
+
} else {
|
|
57
|
+
auto dims = {batch_size, num_directions, hidden_size};
|
|
58
|
+
updateOutputShape(ctx, 1, dims);
|
|
59
|
+
}
|
|
60
|
+
}
|
|
61
|
+
|
|
62
|
+
if (num_outputs > 2) {
|
|
63
|
+
// Y_c : only in the case of LSTM
|
|
64
|
+
propagateElemTypeFromInputToOutput(ctx, 0, 2);
|
|
65
|
+
|
|
66
|
+
if (layout_value == 0) {
|
|
67
|
+
auto dims = {num_directions, batch_size, hidden_size};
|
|
68
|
+
updateOutputShape(ctx, 2, dims);
|
|
69
|
+
} else {
|
|
70
|
+
auto dims = {batch_size, num_directions, hidden_size};
|
|
71
|
+
updateOutputShape(ctx, 2, dims);
|
|
72
|
+
}
|
|
73
|
+
}
|
|
74
|
+
}
|
|
75
|
+
std::function<void(OpSchema&)> RNNDocGenerator_opset14(const char* /*name*/) {
|
|
76
|
+
return [=](OpSchema& schema) {
|
|
77
|
+
schema.Attr(
|
|
78
|
+
"direction",
|
|
79
|
+
"Specify if the RNN is forward, reverse, or bidirectional. "
|
|
80
|
+
"Must be one of forward (default), reverse, or bidirectional.",
|
|
81
|
+
AttributeProto::STRING,
|
|
82
|
+
std::string("forward"));
|
|
83
|
+
schema.Attr(
|
|
84
|
+
"layout",
|
|
85
|
+
"The shape format of inputs X, initial_h and outputs Y, Y_h. "
|
|
86
|
+
"If 0, the following shapes are expected: "
|
|
87
|
+
"X.shape = [seq_length, batch_size, input_size], "
|
|
88
|
+
"Y.shape = [seq_length, num_directions, batch_size, hidden_size], "
|
|
89
|
+
"initial_h.shape = Y_h.shape = [num_directions, batch_size, hidden_size]. "
|
|
90
|
+
"If 1, the following shapes are expected: "
|
|
91
|
+
"X.shape = [batch_size, seq_length, input_size], "
|
|
92
|
+
"Y.shape = [batch_size, seq_length, num_directions, hidden_size], "
|
|
93
|
+
"initial_h.shape = Y_h.shape = [batch_size, num_directions, hidden_size].",
|
|
94
|
+
AttributeProto::INT,
|
|
95
|
+
static_cast<int64_t>(0));
|
|
96
|
+
schema.Attr("hidden_size", "Number of neurons in the hidden layer", AttributeProto::INT, OPTIONAL_VALUE);
|
|
97
|
+
schema.Attr(
|
|
98
|
+
"activation_alpha",
|
|
99
|
+
"Optional scaling values used by some activation functions. The values "
|
|
100
|
+
"are consumed in the order of activation functions, for example (f, g, h) "
|
|
101
|
+
"in LSTM. Default values are the same as of corresponding ONNX operators."
|
|
102
|
+
"For example with LeakyRelu, the default alpha is 0.01.",
|
|
103
|
+
AttributeProto::FLOATS,
|
|
104
|
+
OPTIONAL_VALUE);
|
|
105
|
+
schema.Attr(
|
|
106
|
+
"activation_beta",
|
|
107
|
+
"Optional scaling values used by some activation functions. The values "
|
|
108
|
+
"are consumed in the order of activation functions, for example (f, g, h) "
|
|
109
|
+
"in LSTM. Default values are the same as of corresponding ONNX operators.",
|
|
110
|
+
AttributeProto::FLOATS,
|
|
111
|
+
OPTIONAL_VALUE);
|
|
112
|
+
schema.Attr(
|
|
113
|
+
"clip",
|
|
114
|
+
"Cell clip threshold. Clipping bounds the elements of a tensor "
|
|
115
|
+
"in the range of [-threshold, +threshold] and is applied to the input "
|
|
116
|
+
"of activations. No clip if not specified.",
|
|
117
|
+
AttributeProto::FLOAT,
|
|
118
|
+
OPTIONAL_VALUE);
|
|
119
|
+
schema.Input(
|
|
120
|
+
0,
|
|
121
|
+
"X",
|
|
122
|
+
"The input sequences packed (and potentially padded) into one 3-D "
|
|
123
|
+
"tensor with the shape of `[seq_length, batch_size, input_size]`.",
|
|
124
|
+
"T",
|
|
125
|
+
OpSchema::Single,
|
|
126
|
+
true,
|
|
127
|
+
1,
|
|
128
|
+
OpSchema::Differentiable);
|
|
129
|
+
schema.Input(
|
|
130
|
+
4,
|
|
131
|
+
"sequence_lens",
|
|
132
|
+
"Optional tensor specifying lengths of the sequences in a batch. "
|
|
133
|
+
"If not specified - assumed all sequences in the batch to have "
|
|
134
|
+
"length `seq_length`. It has shape `[batch_size]`.",
|
|
135
|
+
"T1",
|
|
136
|
+
OpSchema::Optional,
|
|
137
|
+
true,
|
|
138
|
+
1,
|
|
139
|
+
OpSchema::NonDifferentiable);
|
|
140
|
+
schema.Input(
|
|
141
|
+
5,
|
|
142
|
+
"initial_h",
|
|
143
|
+
"Optional initial value of the hidden. If not specified - assumed "
|
|
144
|
+
"to be 0. It has shape `[num_directions, batch_size, hidden_size]`.",
|
|
145
|
+
"T",
|
|
146
|
+
OpSchema::Optional,
|
|
147
|
+
true,
|
|
148
|
+
1,
|
|
149
|
+
OpSchema::NonDifferentiable);
|
|
150
|
+
schema.Output(
|
|
151
|
+
0,
|
|
152
|
+
"Y",
|
|
153
|
+
"A tensor that concats all the intermediate output values of the hidden. "
|
|
154
|
+
"It has shape `[seq_length, num_directions, batch_size, hidden_size]`. ",
|
|
155
|
+
"T",
|
|
156
|
+
OpSchema::Optional,
|
|
157
|
+
true,
|
|
158
|
+
1,
|
|
159
|
+
OpSchema::Differentiable);
|
|
160
|
+
schema.Output(
|
|
161
|
+
1,
|
|
162
|
+
"Y_h",
|
|
163
|
+
"The last output value of the hidden. It has shape "
|
|
164
|
+
"`[num_directions, batch_size, hidden_size]`.",
|
|
165
|
+
"T",
|
|
166
|
+
OpSchema::Optional,
|
|
167
|
+
true,
|
|
168
|
+
1,
|
|
169
|
+
OpSchema::Differentiable);
|
|
170
|
+
schema.TypeConstraint(
|
|
171
|
+
"T",
|
|
172
|
+
{"tensor(float16)", "tensor(float)", "tensor(double)"},
|
|
173
|
+
"Constrain input and output types to float tensors.");
|
|
174
|
+
schema.TypeConstraint("T1", {"tensor(int32)"}, "Constrain seq_lens to integer tensor.");
|
|
175
|
+
schema.TypeAndShapeInferenceFunction(RNNShapeInference_opset14);
|
|
176
|
+
};
|
|
177
|
+
}
|
|
178
|
+
|
+static const char* GRU_ver14_doc = R"DOC(
+Computes an one-layer GRU. This operator is usually supported via some custom
+implementation such as CuDNN.
+
+Notations:
+
+* `X` - input tensor
+* `z` - update gate
+* `r` - reset gate
+* `h` - hidden gate
+* `t` - time step (t-1 means previous time step)
+* `W[zrh]` - W parameter weight matrix for update, reset, and hidden gates
+* `R[zrh]` - R recurrence weight matrix for update, reset, and hidden gates
+* `Wb[zrh]` - W bias vectors for update, reset, and hidden gates
+* `Rb[zrh]` - R bias vectors for update, reset, and hidden gates
+* `WB[zrh]` - W parameter weight matrix for backward update, reset, and hidden gates
+* `RB[zrh]` - R recurrence weight matrix for backward update, reset, and hidden gates
+* `WBb[zrh]` - W bias vectors for backward update, reset, and hidden gates
+* `RBb[zrh]` - R bias vectors for backward update, reset, and hidden gates
+* `H` - Hidden state
+* `num_directions` - 2 if direction == bidirectional else 1
+
+Activation functions:
+
+* Relu(x) - max(0, x)
+* Tanh(x) - (1 - e^{-2x})/(1 + e^{-2x})
+* Sigmoid(x) - 1/(1 + e^{-x})
+
+NOTE:
+  Below are optional
+
+* Affine(x) - alpha * x + beta
+* LeakyRelu(x) - x if x >= 0 else alpha * x
+* ThresholdedRelu(x) - x if x >= alpha else 0
+* ScaledTanh(x) - alpha * Tanh(beta * x)
+* HardSigmoid(x) - min(max(alpha * x + beta, 0), 1)
+* Elu(x) - x if x >= 0 else alpha * (e^x - 1)
+* Softsign(x) - x/(1 + |x|)
+* Softplus(x) - log(1 + e^x)
+
+Equations (Default: f=Sigmoid, g=Tanh):
+
+* zt = f(Xt*(Wz^T) + Ht-1*(Rz^T) + Wbz + Rbz)
+* rt = f(Xt*(Wr^T) + Ht-1*(Rr^T) + Wbr + Rbr)
+* ht = g(Xt*(Wh^T) + (rt (.) Ht-1)*(Rh^T) + Rbh + Wbh) # default, when linear_before_reset = 0
+* ht = g(Xt*(Wh^T) + (rt (.) (Ht-1*(Rh^T) + Rbh)) + Wbh) # when linear_before_reset != 0
+* Ht = (1 - zt) (.) ht + zt (.) Ht-1
+)DOC";
+
+ONNX_OPERATOR_SET_SCHEMA(
+    GRU,
+    14,
+    OpSchema()
+        .SetDoc(GET_OP_DOC_STR(std::string(GRU_ver14_doc) + GenerateOptionalArgumentsDoc()))
+        .Attr(
+            "activations",
+            "A list of 2 (or 4 if bidirectional) activation functions "
+            "for update, reset, and hidden gates. The activation functions must be one "
+            "of the activation functions specified above. Optional: See the equations "
+            "for default if not specified.",
+            AttributeProto::STRINGS,
+            OPTIONAL_VALUE)
+        .Attr(
+            "linear_before_reset",
+            "When computing the output of the hidden gate, "
+            "apply the linear transformation before multiplying by the output of the "
+            "reset gate.",
+            AttributeProto::INT,
+            static_cast<int64_t>(0))
+        .Input(
+            1,
+            "W",
+            "The weight tensor for the gates. Concatenation of `W[zrh]` and `WB[zrh]` "
+            "(if bidirectional) along dimension 0. This tensor has shape "
+            "`[num_directions, 3*hidden_size, input_size]`.",
+            "T",
+            OpSchema::Single,
+            true,
+            1,
+            OpSchema::Differentiable)
+        .Input(
+            2,
+            "R",
+            "The recurrence weight tensor. Concatenation of `R[zrh]` and `RB[zrh]` "
+            "(if bidirectional) along dimension 0. This tensor has shape "
+            "`[num_directions, 3*hidden_size, hidden_size]`.",
+            "T",
+            OpSchema::Single,
+            true,
+            1,
+            OpSchema::Differentiable)
+        .Input(
+            3,
+            "B",
+            "The bias tensor for the gates. Concatenation of `[Wb[zrh], Rb[zrh]]` and "
+            "`[WBb[zrh], RBb[zrh]]` (if bidirectional) along dimension 0. This tensor "
+            "has shape `[num_directions, 6*hidden_size]`. Optional: If not specified "
+            "- assumed to be 0",
+            "T",
+            OpSchema::Optional,
+            true,
+            1,
+            OpSchema::Differentiable)
+        .FillUsing(RNNDocGenerator_opset14("GRU")));
+
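From Python, a node against this schema can be built with onnx.helper; the snippet below is a hypothetical sketch showing how the optional inputs B, sequence_lens and initial_h are skipped with empty names.

    from onnx import helper

    # Hypothetical GRU node: only X, W, R are wired; empty strings skip the
    # optional B, sequence_lens and initial_h inputs.
    gru_node = helper.make_node(
        "GRU",
        inputs=["X", "W", "R", "", "", ""],
        outputs=["Y", "Y_h"],
        hidden_size=32,
        direction="forward",
        linear_before_reset=1,
    )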
static const char* LSTM_ver14_doc = R"DOC(
|
|
285
|
+
Computes an one-layer LSTM. This operator is usually supported via some
|
|
286
|
+
custom implementation such as CuDNN.
|
|
287
|
+
|
|
288
|
+
Notations:
|
|
289
|
+
|
|
290
|
+
* `X` - input tensor
|
|
291
|
+
* `i` - input gate
|
|
292
|
+
* `o` - output gate
|
|
293
|
+
* `f` - forget gate
|
|
294
|
+
* `c` - cell gate
|
|
295
|
+
* `t` - time step (t-1 means previous time step)
|
|
296
|
+
* `W[iofc]` - W parameter weight matrix for input, output, forget, and cell gates
|
|
297
|
+
* `R[iofc]` - R recurrence weight matrix for input, output, forget, and cell gates
|
|
298
|
+
* `Wb[iofc]` - W bias vectors for input, output, forget, and cell gates
|
|
299
|
+
* `Rb[iofc]` - R bias vectors for input, output, forget, and cell gates
|
|
300
|
+
* `P[iof]` - P peephole weight vector for input, output, and forget gates
|
|
301
|
+
* `WB[iofc]` - W parameter weight matrix for backward input, output, forget, and cell gates
|
|
302
|
+
* `RB[iofc]` - R recurrence weight matrix for backward input, output, forget, and cell gates
|
|
303
|
+
* `WBb[iofc]` - W bias vectors for backward input, output, forget, and cell gates
|
|
304
|
+
* `RBb[iofc]` - R bias vectors for backward input, output, forget, and cell gates
|
|
305
|
+
* `PB[iof]` - P peephole weight vector for backward input, output, and forget gates
|
|
306
|
+
* `H` - Hidden state
|
|
307
|
+
* `num_directions` - 2 if direction == bidirectional else 1
|
|
308
|
+
|
|
309
|
+
Activation functions:
|
|
310
|
+
|
|
311
|
+
* Relu(x) - max(0, x)
|
|
312
|
+
* Tanh(x) - (1 - e^{-2x})/(1 + e^{-2x})
|
|
313
|
+
* Sigmoid(x) - 1/(1 + e^{-x})
|
|
314
|
+
|
|
315
|
+
NOTE: Below are optional
|
|
316
|
+
|
|
317
|
+
* Affine(x) - alpha*x + beta
|
|
318
|
+
* LeakyRelu(x) - x if x >= 0 else alpha * x
|
|
319
|
+
* ThresholdedRelu(x) - x if x >= alpha else 0
|
|
320
|
+
* ScaledTanh(x) - alpha*Tanh(beta*x)
|
|
321
|
+
* HardSigmoid(x) - min(max(alpha*x + beta, 0), 1)
|
|
322
|
+
* Elu(x) - x if x >= 0 else alpha*(e^x - 1)
|
|
323
|
+
* Softsign(x) - x/(1 + |x|)
|
|
324
|
+
* Softplus(x) - log(1 + e^x)
|
|
325
|
+
|
|
326
|
+
Equations (Default: f=Sigmoid, g=Tanh, h=Tanh):
|
|
327
|
+
|
|
328
|
+
* it = f(Xt*(Wi^T) + Ht-1*(Ri^T) + Pi (.) Ct-1 + Wbi + Rbi)
|
|
329
|
+
* ft = f(Xt*(Wf^T) + Ht-1*(Rf^T) + Pf (.) Ct-1 + Wbf + Rbf)
|
|
330
|
+
* ct = g(Xt*(Wc^T) + Ht-1*(Rc^T) + Wbc + Rbc)
|
|
331
|
+
* Ct = ft (.) Ct-1 + it (.) ct
|
|
332
|
+
* ot = f(Xt*(Wo^T) + Ht-1*(Ro^T) + Po (.) Ct + Wbo + Rbo)
|
|
333
|
+
* Ht = ot (.) h(Ct)
|
|
334
|
+
)DOC";
+
+ONNX_OPERATOR_SET_SCHEMA(
+    LSTM,
+    14,
+    OpSchema()
+        .SetDoc(GET_OP_DOC_STR(std::string(LSTM_ver14_doc) + GenerateOptionalArgumentsDoc()))
+        .Attr(
+            "activations",
+            "A list of 3 (or 6 if bidirectional) activation functions "
+            "for input, output, forget, cell, and hidden. The activation functions must "
+            "be one of the activation functions specified above. Optional: See the equations "
+            "for default if not specified.",
+            AttributeProto::STRINGS,
+            OPTIONAL_VALUE)
+        .Attr(
+            "layout",
+            "The shape format of inputs X, initial_h, initial_c and outputs Y, Y_h, Y_c. "
+            "If 0, the following shapes are expected: "
+            "X.shape = [seq_length, batch_size, input_size], "
+            "Y.shape = [seq_length, num_directions, batch_size, hidden_size], "
+            "initial_h.shape = Y_h.shape = initial_c.shape = Y_c.shape = "
+            "[num_directions, batch_size, hidden_size]. "
+            "If 1, the following shapes are expected: "
+            "X.shape = [batch_size, seq_length, input_size], "
+            "Y.shape = [batch_size, seq_length, num_directions, hidden_size], "
+            "initial_h.shape = Y_h.shape = initial_c.shape = Y_c.shape = "
+            "[batch_size, num_directions, hidden_size].",
+            AttributeProto::INT,
+            static_cast<int64_t>(0))
+        .Attr("input_forget", "Couple the input and forget gates if 1.", AttributeProto::INT, static_cast<int64_t>(0))
+        .Input(
+            1,
+            "W",
+            "The weight tensor for the gates. Concatenation of `W[iofc]` and "
+            "`WB[iofc]` (if bidirectional) along dimension 0. The tensor has shape "
+            "`[num_directions, 4*hidden_size, input_size]`.",
+            "T",
+            OpSchema::Single,
+            true,
+            1,
+            OpSchema::Differentiable)
+        .Input(
+            2,
+            "R",
+            "The recurrence weight tensor. Concatenation of `R[iofc]` and "
+            "`RB[iofc]` (if bidirectional) along dimension 0. This tensor has shape "
+            "`[num_directions, 4*hidden_size, hidden_size]`.",
+            "T",
+            OpSchema::Single,
+            true,
+            1,
+            OpSchema::Differentiable)
+        .Input(
+            3,
+            "B",
+            "The bias tensor for input gate. Concatenation of `[Wb[iofc], Rb[iofc]]`, "
+            "and `[WBb[iofc], RBb[iofc]]` (if bidirectional) along dimension 0. This "
+            "tensor has shape `[num_directions, 8*hidden_size]`. Optional: If not "
+            "specified - assumed to be 0.",
+            "T",
+            OpSchema::Optional,
+            true,
+            1,
+            OpSchema::Differentiable)
+        .Input(
+            6,
+            "initial_c",
+            "Optional initial value of the cell. If not specified - assumed "
+            "to be 0. It has shape `[num_directions, batch_size, hidden_size]`.",
+            "T",
+            OpSchema::Optional,
+            true,
+            1,
+            OpSchema::NonDifferentiable)
+        .Input(
+            7,
+            "P",
+            "The weight tensor for peepholes. Concatenation of `P[iof]` and "
+            "`PB[iof]` (if bidirectional) along dimension 0. It has shape "
+            "`[num_directions, 3*hidde_size]`. Optional: If not specified - "
+            "assumed to be 0.",
+            "T",
+            OpSchema::Optional,
+            true,
+            1,
+            OpSchema::Differentiable)
+        .FillUsing(RNNDocGenerator_opset14("LSTM"))
+        .Output(
+            2,
+            "Y_c",
+            "The last output value of the cell. It has shape "
+            "`[num_directions, batch_size, hidden_size]`.",
+            "T",
+            OpSchema::Optional,
+            true,
+            1,
+            OpSchema::Differentiable));
+
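The layout attribute above only swaps the batch and sequence axes; a quick illustrative conversion of an input prepared for layout=0 into the layout=1 convention:

    import numpy as np

    seq_length, batch_size, input_size = 7, 2, 5
    X_layout0 = np.zeros((seq_length, batch_size, input_size), dtype=np.float32)

    # layout=1 expects X with shape [batch_size, seq_length, input_size].
    X_layout1 = np.transpose(X_layout0, (1, 0, 2))
    assert X_layout1.shape == (batch_size, seq_length, input_size)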
static const char* RNN_ver14_doc = R"DOC(
|
|
434
|
+
Computes an one-layer simple RNN. This operator is usually supported
|
|
435
|
+
via some custom implementation such as CuDNN.
|
|
436
|
+
|
|
437
|
+
Notations:
|
|
438
|
+
|
|
439
|
+
* `X` - input tensor
|
|
440
|
+
* `i` - input gate
|
|
441
|
+
* `t` - time step (t-1 means previous time step)
|
|
442
|
+
* `Wi` - W parameter weight matrix for input gate
|
|
443
|
+
* `Ri` - R recurrence weight matrix for input gate
|
|
444
|
+
* `Wbi` - W parameter bias vector for input gate
|
|
445
|
+
* `Rbi` - R parameter bias vector for input gate
|
|
446
|
+
* `WBi` - W parameter weight matrix for backward input gate
|
|
447
|
+
* `RBi` - R recurrence weight matrix for backward input gate
|
|
448
|
+
* `WBbi` - WR bias vectors for backward input gate
|
|
449
|
+
* `RBbi` - RR bias vectors for backward input gate
|
|
450
|
+
* `H` - Hidden state
|
|
451
|
+
* `num_directions` - 2 if direction == bidirectional else 1
|
|
452
|
+
|
|
453
|
+
Activation functions:
|
|
454
|
+
|
|
455
|
+
* Relu(x) - max(0, x)
|
|
456
|
+
* Tanh(x) - (1 - e^{-2x})/(1 + e^{-2x})
|
|
457
|
+
* Sigmoid(x) - 1/(1 + e^{-x})
|
|
458
|
+
|
|
459
|
+
NOTE: Below are optional
|
|
460
|
+
|
|
461
|
+
* Affine(x) - alpha*x + beta
|
|
462
|
+
* LeakyRelu(x) - x if x >= 0 else alpha * x
|
|
463
|
+
* ThresholdedRelu(x) - x if x >= alpha else 0
|
|
464
|
+
* ScaledTanh(x) - alpha*Tanh(beta*x)
|
|
465
|
+
* HardSigmoid(x) - min(max(alpha*x + beta, 0), 1)
|
|
466
|
+
* Elu(x) - x if x >= 0 else alpha*(e^x - 1)
|
|
467
|
+
* Softsign(x) - x/(1 + |x|)
|
|
468
|
+
* Softplus(x) - log(1 + e^x)
|
|
469
|
+
|
|
470
|
+
Equations (Default: f=Tanh):
|
|
471
|
+
|
|
472
|
+
* Ht = f(Xt*(Wi^T) + Ht-1*(Ri^T) + Wbi + Rbi)
|
|
473
|
+
)DOC";
+
+ONNX_OPERATOR_SET_SCHEMA(
+    RNN,
+    14,
+    OpSchema()
+        .SetDoc(GET_OP_DOC_STR(std::string(RNN_ver14_doc) + GenerateOptionalArgumentsDoc()))
+        .Attr(
+            "activations",
+            "One (or two if bidirectional) activation function for "
+            "input gate. The activation function must be one of the activation "
+            "functions specified above. Optional: Default `Tanh` if not specified.",
+            AttributeProto::STRINGS,
+            std::vector<std::string>{"Tanh", "Tanh"})
+        .Input(
+            1,
+            "W",
+            "The weight tensor for input gate. Concatenation of `Wi` and `WBi` "
+            "(if bidirectional). The tensor has shape "
+            "`[num_directions, hidden_size, input_size]`.",
+            "T",
+            OpSchema::Single,
+            true,
+            1,
+            OpSchema::Differentiable)
+        .Input(
+            2,
+            "R",
+            "The recurrence weight tensor. Concatenation of `Ri` and `RBi` "
+            "(if bidirectional). The tensor has shape "
+            "`[num_directions, hidden_size, hidden_size]`.",
+            "T",
+            OpSchema::Single,
+            true,
+            1,
+            OpSchema::Differentiable)
+        .Input(
+            3,
+            "B",
+            "The bias tensor for input gate. Concatenation of `[Wbi, Rbi]` "
+            "and `[WBbi, RBbi]` (if bidirectional). The tensor has shape "
+            "`[num_directions, 2*hidden_size]`. Optional: If not specified - assumed "
+            "to be 0.",
+            "T",
+            OpSchema::Optional,
+            true,
+            1,
+            OpSchema::Differentiable)
+        .FillUsing(RNNDocGenerator_opset14("RNN")));
+
 std::function<void(OpSchema&)> RNNDocGeneratorOld(const char* /*name*/) {
   return [=](OpSchema& schema) {
     schema.Attr(
@@ -243,8 +758,8 @@ void RNNShapeInference1(InferenceContext& ctx) {
     // Documentation suggests that the output Y is absent in this case
     // Different tests seem to disagree on whether Y_h and Y_c, if present,
     // should be in positions 0 & 1 or 1 & 2. updateOutputShape(ctx, 0,
-    // {num_directions, batch_size, hidden_size});
-    // updateOutputShape(ctx, 1, {num_directions, batch_size, hidden_size});
+    // {num_directions, batch_size, hidden_size}); // Y_h if (num_outputs > 1)
+    // updateOutputShape(ctx, 1, {num_directions, batch_size, hidden_size}); //
     // Y_c
   }
 }
onnx/defs/schema.cc CHANGED
@@ -5,6 +5,8 @@
 #include "onnx/defs/schema.h"
 
 #include <stdexcept>
+#include <string>
+#include <string_view>
 #include <unordered_set>
 #include <utility>
 
@@ -107,30 +109,10 @@ OpSchemaRegistry* OpSchemaRegistry::Instance() {
 
 void OpSchema::CheckInputOutputType(struct InferenceContext& ctx) const {
   std::unordered_map<std::string, std::string> type_constraints;
-  if (inputs_.empty() && ctx.getNumInputs() > 0) {
-    fail_check(
-        "Node (",
-        domain(),
-        "::",
-        Name(),
-        ":",
-        since_version(),
-        ") takes zero inputs, but got ",
-        ctx.getNumInputs(),
-        " in graph");
-  }
-  if (outputs_.empty() && ctx.getNumOutputs() > 0) {
-    fail_check(
-        "Node (",
-        domain(),
-        "::",
-        Name(),
-        ":",
-        since_version(),
-        ") yields zero outputs, but got ",
-        ctx.getNumOutputs(),
-        " in graph");
-  }
+  // Check the number of inputs / output.
+  VerifyInputNum(ctx.getNumInputs());
+  VerifyOutputNum(ctx.getNumOutputs());
+
   // check all input types
   for (size_t in_idx = 0; in_idx < ctx.getNumInputs(); ++in_idx) {
     // If the last input is Variadic by definition, checker still needs to check the rest of actual input's type
@@ -200,41 +182,8 @@ void OpSchema::Verify(const NodeProto& node) const {
     fail_check("Operator '", name_, "' has been deprecated since version ", since_version_);
   }
 
-  // Check the number of inputs.
-  if (node.input_size() < min_input_ || node.input_size() > max_input_) {
-    fail_check(
-        "Node (",
-        node.name(),
-        ") has input size ",
-        node.input_size(),
-        " not in range [min=",
-        min_input_,
-        ", max=",
-        max_input_,
-        "].");
-  }
-
-  if (!num_inputs_allowed_(node.input_size())) {
-    fail_check("Node (", node.name(), ") has input size ", node.input_size(), " not in allowed input sizes.");
-  }
-
-  // Check the number of outputs.
-  if (node.output_size() < min_output_ || node.output_size() > max_output_) {
-    fail_check(
-        "Node (",
-        node.name(),
-        ") has output size ",
-        node.output_size(),
-        " not in range [min=",
-        min_output_,
-        ", max=",
-        max_output_,
-        "].");
-  }
-
-  if (!num_outputs_allowed_(node.output_size())) {
-    fail_check("Node (", node.name(), "has output size ", node.output_size(), " not in allowed output sizes.");
-  }
+  VerifyInputNum(node.input_size(), node.name());
+  VerifyOutputNum(node.output_size(), node.name());
 
   // Check the values of inputs / outputs
   for (int in_idx = 0; in_idx < node.input_size(); ++in_idx) {
@@ -381,6 +330,51 @@ void OpSchema::Verify(const NodeProto& node) const {
   // Phew. All verifications passed.
 }
 
+std::string OpSchema::VerifyFailPrefix(std::string_view node_name) const {
+  std::string str = "Node";
+  if (!node_name.empty()) {
+    str = str + "(" + std::string(node_name) + ")";
+  }
+  str = str + " with schema(" + domain() + "::" + Name() + ":" + std::to_string(since_version()) + ")";
+  return str;
+}
+
+void OpSchema::VerifyInputNum(int input_num, std::string_view node_name) const {
+  if (input_num < min_input_ || input_num > max_input_) {
+    fail_check(
+        VerifyFailPrefix(node_name),
+        " has input size ",
+        input_num,
+        " not in range [min=",
+        min_input_,
+        ", max=",
+        max_input_,
+        "].");
+  }
+
+  if (!num_inputs_allowed_(input_num)) {
+    fail_check(VerifyFailPrefix(node_name), " has input size ", input_num, " not in allowed input sizes.");
+  }
+}
+
+void OpSchema::VerifyOutputNum(int output_num, std::string_view node_name) const {
+  if (output_num < min_output_ || output_num > max_output_) {
+    fail_check(
+        VerifyFailPrefix(node_name),
+        " has output size ",
+        output_num,
+        " not in range [min=",
+        min_output_,
+        ", max=",
+        max_output_,
+        "].");
+  }
+
+  if (!num_outputs_allowed_(output_num)) {
+    fail_check(VerifyFailPrefix(node_name), " has output size ", output_num, " not in allowed output sizes.");
+  }
+}
+
 OpSchema& OpSchema::SinceVersion(OperatorSetVersion v) {
   since_version_ = v;
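These count checks surface in Python through onnx.checker; below is a small assumed repro of the kind of error VerifyInputNum reports, using a Relu node wired with two inputs where the schema allows exactly one.

    from onnx import TensorProto, checker, helper

    bad_node = helper.make_node("Relu", inputs=["x", "extra"], outputs=["y"])
    graph = helper.make_graph(
        [bad_node],
        "bad_relu",
        inputs=[
            helper.make_tensor_value_info("x", TensorProto.FLOAT, [1]),
            helper.make_tensor_value_info("extra", TensorProto.FLOAT, [1]),
        ],
        outputs=[helper.make_tensor_value_info("y", TensorProto.FLOAT, [1])],
    )
    model = helper.make_model(graph)

    try:
        checker.check_model(model)
    except checker.ValidationError as err:
        print(err)  # expected to mention the allowed input-size range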