disable tests
sbalandi committed Feb 27, 2024 · 1 parent cfa7f10 · commit 1822371
Showing 2 changed files with 44 additions and 2 deletions.
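
Both files extend disabledTestPatterns(), the per-plugin skip list whose entries are regular expressions matched against full test names. The sketch below shows how such a list is typically consumed; it is a minimal illustration only, and the helper name isTestDisabled() plus the way the test name is obtained are assumptions, not the project's actual test infrastructure.

#include <regex>
#include <string>
#include <vector>

std::vector<std::string> disabledTestPatterns();  // provided by each plugin's skip_tests_config.cpp

// Hypothetical helper: report whether a full test name matches any skip pattern.
bool isTestDisabled(const std::string& fullTestName) {
    for (const auto& pattern : disabledTestPatterns()) {
        // Entries are ECMAScript regexes; a match means the test is skipped rather than run.
        if (std::regex_match(fullTestName, std::regex{pattern})) {
            return true;
        }
    }
    return false;
}
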
@@ -206,6 +206,46 @@ std::vector<std::string> disabledTestPatterns() {
R"(smoke_LPT/ConvolutionTransformation.CompareWithRefImpl/f32_\[.*,3,16,16\]_CPU_f32_rank=4D_fq_on_data=\{level=256_shape=\[1\]_input_low=\{ 0 \}_input_high=\{ 255 \}_output_low=\{ .*18.7 \}_output_high\{ 18.8 \}_precision=\}_fq_on_weights=\{_255_\[6,1,1,1\]_\{ .*1.52806e.*39, .*0.2, .*0.3, .*0.3, .*0.2, .*0.1 \}_\{ 1.52806e.*39, 0.2, 0.3, 0.3, 0.2, 0.1 \}\})",
// Issue: 132494
R"(.*smoke_Inverse.*bf16.*)",
// Issue: CVS-133173
R"(.*smoke_LPT_4D/ConvolutionBackpropDataTransformation.CompareWithRefImpl/f32_\[1,8,16,16\]_CPU_f32_\[16,16\]_level=256_shape=\[1,1,1,1\]_input_low={ 0 }_input_high={ 25.5 }_output_low={ 0 }_output_high{ 25.5 }_precision=__255_\[1,1,1,1\]_{ -12.7 }_{ 12.7 }_{}.*)",
R"(.*smoke_LPT_4D/ConvolutionBackpropDataTransformation.CompareWithRefImpl/f32_\[1,8,16,16\]_CPU_f32_\[16,16\]_level=256_shape=\[\]_input_low={ 0 }_input_high={ 25.5 }_output_low={ 0 }_output_high{ 25.5 }_precision=__255_\[\]_{ -12.7 }_{ 12.7 }_{}.*)",
R"(.*smoke_LPT_4D/ConvolutionBackpropDataTransformation.CompareWithRefImpl/f32_\[1,8,16,16\]_CPU_f32_\[16,16\]_level=256_shape=\[1,1,1,1\]_input_low={ 0 }_input_high={ 255 }_output_low={ -12.7 }_output_high{ 12.8 }_precision=__255_\[1,1,1,1\]_{ -127 }_{ 127 }_{}.*)",
R"(.*smoke_LPT_4D/ConvolutionBackpropDataTransformation.CompareWithRefImpl/f32_\[1,8,16,16\]_CPU_f32_\[16,16\]_level=256_shape=\[1,1,1,1\]_input_low={ 0 }_input_high={ 255 }_output_low={ -12.7 }_output_high{ 12.8 }_precision=__0_\[\]_{ }_{ }___f32__{ (12|1000) }_f32_\[\]_1_1_undefined_0__{ 4 }_f32_\[\]_1_1_undefined.*)",
R"(.*smoke_LPT/ConvolutionQDqTransformation.CompareWithRefImpl/f32_\[(1,3,4,4|4,3,4,4)\]_CPU_f32_level=256_shape=\[1,1,1,1\]_input_low={ -12.8 }_input_high={ 12.7 }_output_low={ 0 }_output_high={ 255 }_precision=f32__u8___f32__{ 128 }_f32_\[\]_1_1_u8_1__{ 0.1 }_f32_\[\]_1_1_undefined__{, 15}_f32_\[\]__255_\[1,1,1,1\]_{ -128 }_{ 127 }__i8___f32__{ -128 }_f32_\[\]_1_1_i8_1__{ 0.2 }_f32_\[\]_1_1_undefinedConvolution_f32.*)",
R"(.*smoke_LPT/ConvolutionQDqTransformation.CompareWithRefImpl/f32_\[(1,3,4,4|4,3,4,4)\]_CPU_f32_level=256_shape=\[1,1,1,1\]_input_low={ -12.8 }_input_high={ 12.7 }_output_low={ 0 }_output_high={ 255 }_precision=f32__u8___f32_{}__{ 0.1 }_f32_\[\]_1_1_undefined__{, 15}_f32_\[\]__255_\[1,1,1,1\]_{ -128 }_{ 127 }__i8___f32_{}__{ 0.2 }_f32_\[\]_1_1_undefinedConvolution_u8.*)",
R"(.*smoke_LPT/MultiplyTransformation.CompareWithRefImpl/f32_\[1,3,16,16\]_CPU_f32_undefined__on_branch1_0_2.55_0_2.55_on_branch2_-1.28_1.27_-1.28_1.27_1.*)",
R"(.*smoke_LPT/MultiplyTransformation.CompareWithRefImpl/f32_\[1,3,16,16\]_CPU_f32_broadcast1_undefined__on_branch1_-1.28_1.27_-1.28_1.27_on_branch2_0_2.55_0_2.55_0.*)",
R"(.*smoke_LPT/MultiplyTransformation.CompareWithRefImpl/f32_\[1,3,16,16\]_CPU_f32_broadcast2_undefined__on_branch1_0_2.55_0_2.55_on_branch2_-1.27_1.28_-1.27_1.28_0.*)",
R"(.*smoke_LPT/ConvolutionTransformation.CompareWithRefImpl/f32_\[(1,3,16,16|4,3,16,16)\]_CPU_f32_rank=4D_fq_on_data={level=256_shape=\[1\]_input_low={ 0 }_input_high={ 255 }_output_low={ -18.7 }_output_high{ 18.8 }_precision=}_fq_on_weights={_255_\[1\]_{ -18.7 }_{ 18.7 }}.*)",
R"(.*smoke_LPT/ConvolutionTransformation.CompareWithRefImpl/f32_\[(1,3,16,16|4,3,16,16)\]_CPU_f32_rank=4D_fq_on_data={level=256_shape=\[1\]_input_low={ 0 }_input_high={ 255 }_output_low={ -18.7 }_output_high{ 18.8 }_precision=}_fq_on_weights={_255_\[6,1,1,1\]_{ -1.52806e-39, -1.52806e-39, -1.52806e-39, -1.52806e-39, -1.52806e-39, -1.52806e-39 }_{ 1.52806e-39, 1.52806e-39, 1.52806e-39, 1.52806e-39, 1.52806e-39, 1.52806e-39 }}.*)",
R"(.*smoke_LPT/RecurrentCellTransformation.CompareWithRefImpl/f32_\[1,2,16\]_CPU_f32FQ_X_level=256_shape=\[\]_input_low={ 0 }_input_high={ 2.55 }_output_low={ 0 }_output_high={ 255 }_precision=_DQ_X___f32_{}__{ 0.01 }_undefined_\[\]_0_1_undefined_FQ_W_level=255_shape=\[\]_input_low={ -1.27 }_input_high={ 1.27 }_output_low={ -1.27 }_output_high={ 1.27 }_precision=_DQ_W_{}.*)",
R"(.*smoke_LPT/SubtractTransformation.CompareWithRefImpl/f16_\[1,3,16,16\]_CPU_f32.*)",
R"(.*smoke_LPT/FakeQuantizeTransformation.CompareWithRefImpl/f32_\[1,32,72,48\]_CPU_f32_0_level=65536_shape=\[\]_input_low={ 0 }_input_high={ 65.535 }_output_low={ 0 }_output_high={ 65.535 }_precision=.*)",
R"(.*smoke_LPT/FakeQuantizeTransformation.CompareWithRefImpl/f32_\[1,32,72,48\]_CPU_f32_0_level=65536_shape=\[\]_input_low={ -32.768 }_input_high={ 32.767 }_output_low={ -32.768 }_output_high={ 32.767 }_precision=.*)",
R"(.*smoke_LPT/MoveFakeQuantizeTransformation.CompareWithRefImpl/f32_\[(1|4),1,16,16\]_CPU_f32SPLIT:0_OP:_FQ:level=256_shape=\[\]_input_low={ 0 }_input_high={ 2.55 }_output_low={ 0 }_output_high={ 2.55 }_precision=_DQ:{}.*)",
R"(.*smoke_LPT/MoveFakeQuantizeTransformation.CompareWithRefImpl/f32_\[(1|4),1,16,16\]_CPU_f32SPLIT:0_OP:_FQ:level=256_shape=\[\]_input_low={ 0 }_input_high={ 2.55 }_output_low={ 0 }_output_high={ 255 }_precision=_DQ:__f32_{}__{ 0.01 }_undefined_\[\]_0_1_undefined.*)",
R"(.*smoke_LPT/MoveFakeQuantizeTransformation.CompareWithRefImpl/f32_\[(1|4),1,16,16\]_CPU_f32SPLIT:0_OP:relu_FQ:level=256_shape=\[\]_input_low={ 0 }_input_high={ 2.55 }_output_low={ 0 }_output_high={ 255 }_precision=_DQ:__f32_{}__{ 0.01 }_undefined_\[\]_0_1_undefined.*)",
R"(.*smoke_LPT/MoveFakeQuantizeTransformation.CompareWithRefImpl/f32_\[(1|4),1,16,16\]_CPU_f32SPLIT:0_OP:relu_FQ:level=256_shape=\[1,6,1,1\]_input_low={ 0, 0, 0, 0, 0, 0 }_input_high={ 2.55, 1.275, 0.85, 0.6375, 0.51, 0.425 }_output_low={ -128, -128, -128, -128, -128, -128 }_output_high={ 127, 127, 127, 127, 127, 127 }_precision=_DQ:{}.*)",
R"(.*smoke_LPT/MoveFakeQuantizeTransformation.CompareWithRefImpl/f32_\[(1|4),1,16,16\]_CPU_f32SPLIT:0_OP:_FQ:level=256_shape=\[1,6,1,1\]_input_low={ 0, 0, 0, 0, 0, 0 }_input_high={ 2.55, 1.275, 0.85, 0.6375, 0.51, 0.425 }_output_low={ 0, 0, 0, 0, 0, 0 }_output_high={ 255, 127.5, 85, 63.75, 51, 42.5 }_precision=_DQ:__f32_{}__{ 0.01, 0.02, 0.03, 0.04, 0.05, 0.06 }_f32_\[1,6,1,1\]_1_1_undefined.*)",
R"(.*smoke_LPT/MoveFakeQuantizeTransformation.CompareWithRefImpl/f32_\[(1|4),1,16,16\]_CPU_f32SPLIT:0_OP:_FQ:level=256_shape=\[1,6,1,1\]_input_low={ 0, 0, 0, 0, 0, 0 }_input_high={ 2.55, 1.275, 0.85, 0.6375, 0.51, 0.425 }_output_low={ 0, 0, 0, 0, 0, 0 }_output_high={ 255, 127.5, 85, 63.75, 51, 42.5 }_precision=_DQ:__f32__{ -127, -63.5, -42.3333, -31.75, -25.4, -21.1667 }_f32_\[1,6,1,1\]_1_1_undefined_0__{ 0.01 }_undefined_\[\]_0_1_undefined.*)",
R"(.*smoke_LPT/MoveFakeQuantizeTransformation.CompareWithRefImpl/f32_\[(1|4),1,16,16\]_CPU_f32SPLIT:1_OP:_FQ:level=256_shape=\[1,6,1,1\]_input_low={ 0, 0, 0, 0, 0, 0 }_input_high={ 2.55, 1.275, 0.85, 0.6375, 0.51, 0.425 }_output_low={ 0, 0, 0, 0, 0, 0 }_output_high={ 255, 127.5, 85, 63.75, 51, 42.5 }_precision=_DQ:__f32_{}__{ 0.01, 0.02, 0.03, 0.04, 0.05, 0.06 }_f32_\[1,6,1,1\]_1_1_undefined.*)",
R"(.*smoke_LPT/MoveFakeQuantizeTransformation.CompareWithRefImpl/f32_\[(1|4),1,16,16\]_CPU_f32SPLIT:1_OP:_FQ:level=256_shape=\[1,6,1,1\]_input_low={ 0, 0, 0, 0, 0, 0 }_input_high={ 2.55, 1.275, 0.85, 0.6375, 0.51, 0.425 }_output_low={ 0, 0, 0, 0, 0, 0 }_output_high={ 255, 127.5, 85, 63.75, 51, 42.5 }_precision=_DQ:__f32__{ -127, -63.5, -42.3333, -31.75, -25.4, -21.1667 }_f32_\[1,6,1,1\]_1_1_undefined_0__{ 0.01 }_undefined_\[\]_0_1_undefined.*)",
R"(.*smoke_LPT/EliminateFakeQuantizeTransformation.CompareWithRefImpl/CPU_f32_level=256_shape=\[\]_input_low={ 0 }_input_high={ 127.5 }_output_low={ 0 }_output_high{ 127.5 }_precision=f32_level=256_shape=\[\]_input_low={ 0 }_input_high={ (127.5|121.429) }_output_low={ 0 }_output_high{ (127.5|121.429) }_precision=f32.*)",
R"(.*smoke_LPT/MatMulWithOptimizedConstantFq.CompareWithRefImpl/f32_\[1,16\]_\[(10|16),16\]_CPU_level=256_shape=\[1\]_input_low={ 0 }_input_high={ 25.5 }_output_low={ 0 }_output_high{ 25.5 }_precision=_level=255_shape=\[1\]_input_low={ -12.7 }_input_high={ 12.7 }_output_low={ -12.7 }_output_high{ 12.7 }_precision=.*)",
R"(.*smoke_LPT/GroupConvolutionTransformation.CompareWithRefImpl/f32_\[1,6,24,24\]_CPU_f32_4D_\[1,6,24,24\]_\[1,24,18,18\]_3_-1_level=256_shape=\[1,1,1,1\]_input_low={ 0 }_input_high={ 25.5 }_output_low={ 0 }_output_high{ 25.5 }_precision=_wo_reshape__255_\[3,8,1,1,1\]_{ -127 }_{ 127 }.*)",
R"(.*smoke_LPT/GroupConvolutionTransformation.CompareWithRefImpl/f32_\[1,6,24.*\]_CPU_f32_3D_\[1,6,24\]_\[1,24,18.*\]_3_-1_level=256_shape=\[1,1,1.*\]_input_low={ 0 }_input_high={ 25.5 }_output_low={ 0 }_output_high{ 25.5 }_precision=_wo_reshape__255_\[3,8,1,1.*\]_{ -127, -12.7, -1.27, -127, -12.7, -1.27, -127, -12.7, -127, -12.7, -1.27, -127, -12.7, -1.27, -127, -12.7, -127, -12.7, -1.27, -127, -12.7, -1.27, -127, -12.7 }_{ 127, 12.7, 1.27, 127, 12.7, 1.27, 127, 12.7, 127, 12.7, 1.27, 127, 12.7, 1.27, 127, 12.7, 127, 12.7, 1.27, 127, 12.7, 1.27, 127, 12.7 }.*)",
R"(.*smoke_LPT/GroupConvolutionTransformation.CompareWithRefImpl/f32_\[1,6,1,24,24\]_CPU_f32_5D_\[1,6,1,24,24\]_\[1,24,1,18,18\]_3_-1_level=256_shape=\[1,1,1,1,1\]_input_low={ -12.8 }_input_high={ 12.7 }_output_low={ -12.8 }_output_high{ 12.7 }_precision=_reshape_on_weights__255_\[1,1,1,1,1\]_{ -127 }_{ 127 }.*)",
R"(.*smoke_LPT/GroupConvolutionTransformation.CompareWithRefImpl/f32_\[1,24,8,12,12\]_CPU_f32_5D_\[1,24,8,12,12\]_\[1,24,1,1,1\]_3_-1_level=256_shape=\[1,1,1,1,1\]_input_low={ -12.8 }_input_high={ 12.7 }_output_low={ -12.8 }_output_high{ 12.7 }_precision=_reshape_on_weights__255_\[1,1,1,1,1\]_{ -127 }_{ 127 }.*)",
R"(.*smoke_LPT/GroupConvolutionQDqTransformation.CompareWithRefImpl/f32_\[1,6,24,24\]_CPU_f32_level=256_shape=\[1,1,1,1\]_input_low={ -12.8 }_input_high={ 12.7 }_output_low={ 0 }_output_high={ 255 }_precision=f32__u8___f32__{ 128 }_f32_\[\]_1_1_u8_1__{ 0.1 }_f32_\[\]_1_1_undefined__{, 15151515}_f32_\[6,2,5,5\]__255_\[1,1,1,1\]_{ -128 }_{ 127 }__i8___f32__{ -128 }_f32_\[\]_1_1_i8_1__{ 0.2 }_f32_\[\]_1_1_undefinedoutput_original_f32_multiplyAfter=(false|true).*)",
R"(.*smoke_LPT/GroupConvolutionQDqTransformation.CompareWithRefImpl/f32_\[1,6,24,24\]_CPU_f32_level=256_shape=\[1,1,1,1\]_input_low={ -12.8 }_input_high={ 12.7 }_output_low={ 0 }_output_high={ 255 }_precision=f32__u8___f32_{}__{ 0.1 }_f32_\[\]_1_1_undefined__{, 15151515}_f32_\[6,2,5,5\]__255_\[1,1,1,1\]_{ -128 }_{ 127 }__i8___f32_{}__{ 0.2 }_f32_\[\]_1_1_undefinedoutput_original_u8_multiplyAfter=(false|true).*)",
R"(.*smoke_LPT/GroupConvolutionQDqTransformation.CompareWithRefImpl/f32_\[1,6,24,24\]_CPU_f32_level=256_shape=\[1,1,1,1\]_input_low={ -12.8 }_input_high={ 12.7 }_output_low={ 0 }_output_high={ 255 }_precision=f32__u8___f32__{ 128 }_f32_\[\]_1_1_u8_1__{ 0.1 }_f32_\[\]_1_1_undefined__{, 15151515}_i8_\[6,2,5,5\]__0_\[\]_{ }_{ }_{}___f32__{ 127 }_f32_\[\]_1_1_i8_1__{ 0.2 }_f32_\[\]_1_1_undefinedoutput_original_f32_multiplyAfter=(false|true).*)",
R"(.*smoke_LPT/GroupConvolutionQDqTransformation.CompareWithRefImpl/f32_\[1,6,24,24\]_CPU_f32_level=256_shape=\[1,1,1,1\]_input_low={ -12.8 }_input_high={ 12.7 }_output_low={ 0 }_output_high={ 255 }_precision=f32__u8___f32__{ 128 }_f32_\[\]_1_1_u8_1__{ 0.1 }_f32_\[\]_1_1_undefined__{, 15151515}_i8_\[3,2,2,5,5\]__0_\[\]_{ }_{ }_{}___f32__{ 126, 127, 126, 127, 126, 127 }_f32_\[3,2,1,1,1\]_1_1_i8_1__{ 0.1, 0.2, 0.1, 0.2, 0.1, 0.2 }_f32_\[3,2,1,1,1\]_1_1_undefinedoutput_original_f32_multiplyAfter=true.*)",
R"(.*smoke_LPT/MatMulWithConstantTransformation.CompareWithRefImpl/\[(2,3,4|1,1,3,4)\]_f32_CPU_level=256_shape=\[1,1,1\]_input_low={ 0 }_input_high={ 255 }_output_low={ 0, 0, 0 }_output_high={ 255, 25.5, 255 }_precision=_level=256_shape=\[1\]_input_low={ -128 }_input_high={ 127 }_output_low={ -128, -12.8 }_output_high={ 127, 12.7 }_precision=_{}.*)",
R"(.*smoke_LPT/ReduceSumTransformation.CompareWithRefImpl/f32_\[1,3,10,10\]_CPU_f32_level=256_shape=\[1,1,1,1\]_input_low={ 0 }_input_high={ 255 }_output_low={ 0 }_output_high{ 127 }_precision=_keepDims__reduce_axis_2_3_.*)",
R"(.*smoke_LPT_3D/ConvolutionBackpropDataTransformation.CompareWithRefImpl/f32_\[1,8,16\]_CPU_f32_\[16\]_level=256_shape=\[1,1,1\]_input_low={ 0 }_input_high={ 25.5 }_output_low={ 0 }_output_high{ 25.5 }_precision=__255_\[1,1,1\]_{ -12.7 }_{ 12.7 }_{}.*)",
R"(.*smoke_LPT/FuseDequantizeToFakeQuantizeTransformation.CompareWithRefImpl/CPU_f32_0_undefined_\[\]_f32__{}_{}__{ 0.01 }_undefined_\[\]_0_1_undefined_f32_level=256_shape=\[\]_input_low={ 0 }_input_high={ 2.55 }_output_low={ 0 }_output_high={ 2.55 }_precision=.*)",
R"(.*smoke_LPT/FuseDequantizeToFakeQuantizeTransformation.CompareWithRefImpl/CPU_f32_0_undefined_\[\]_f32__{}_{}__{ 0.01, 0.1, 1 }_f32_\[1,3\]_1_1_undefined_f32_level=256_shape=\[\]_input_low={ 0 }_input_high={ 2.55 }_output_low={ 0 }_output_high={ 2.55 }_precision=.*)",

#if defined(OPENVINO_ARCH_ARM)
// Issue: 126177
R"(.*smoke_CompareWithRefs_4D_Bitwise.*/EltwiseLayerCPUTest.*_eltwise_op_type=Bitwise.*_model_type=i32_.*)"
6 changes: 4 additions & 2 deletions src/plugins/template/tests/functional/skip_tests_config.cpp
@@ -123,8 +123,10 @@ std::vector<std::string> disabledTestPatterns() {
R"(.*ReferenceGroupNormalization.*_f64*)",
// Precision not high enough to get exact result for the complex test cases
// (both tiny values and very high values necessary)
R"(.*ReferenceInverse.*bf16.*[4,4].*)"};

R"(.*ReferenceInverse.*bf16.*[4,4].*)",
R"(.smoke_CompareWithRefs_static/EltwiseLayerTest.Inference/IS=.*_TS=.*(4.4|1.10|10|2).200.*_eltwise_op_type=Mod_secondary_input_type=PARAMETER_opType=VECTOR_model_type=f32_InType=undefined_OutType=undefined.*)",
R"(.smoke_CompareWithRefs_static/EltwiseLayerTest.Inference/IS=.*_TS=\(2.17.5.1\)_\(1.17.1.4.*eltwise_op_type=Mod_secondary_input_type=PARAMETER_opType=VECTOR_model_type=f16_InType=undefined_OutType=undefined_.*)",
R"(.smoke_CompareWithRefs_static/EltwiseLayerTest.Inference/IS=.*_TS=.*(2.200|10.200|1.10.100|4.4.16|1.2.4|1.4.4|1.4.4.1).*eltwise_op_type=Mod_secondary_input_type=PARAMETER_opType=VECTOR_model_type=f16_InType=undefined_OutType=undefined.*)"};
#ifdef _WIN32
// CVS-63989
retVector.emplace_back(R"(.*ReferenceSigmoidLayerTest.*u64.*)");