// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.

// Copyright (C) 2018-2019, Intel Corporation, all rights reserved.
// Third party copyrights are property of their respective owners.

#include "test_precomp.hpp"
#include "npy_blob.hpp"
#include <opencv2/dnn/shape_utils.hpp>
#include <numeric>
namespace opencv_test { namespace {

void yoloPostProcessing(
    std::vector<Mat>& outs,
    std::vector<int>& keep_classIds,
    std::vector<float>& keep_confidences,
    std::vector<Rect2d>& keep_boxes,
    float conf_threshold,
    float iou_threshold,
    const std::string& model_name,
    const int nc = 80);
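// Note (inferred from the signature; the definition is presumably provided later in this
// file): the helper is expected to decode raw YOLO-style detection outputs into class ids,
// confidences and boxes, keeping candidates above conf_threshold and applying NMS with
// iou_threshold; the exact decoding depends on model_name and the class count nc.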

template<typename TString>
static std::string _tf(TString filename, bool required = true)
{
    return findDataFile(std::string("dnn/onnx/") + filename, required);
}

class Test_ONNX_layers : public DNNTestLayer
{
public:
    bool required;

    Test_ONNX_layers() : required(true) { }

    enum Extension
    {
        npy,
        pb
    };

    void testInputShapes(const Net& net, const std::vector<Mat>& inps)
    {
        std::vector<MatShape> inLayerShapes;
        std::vector<MatShape> outLayerShapes;
        net.getLayerShapes(MatShape(), 0, inLayerShapes, outLayerShapes);
        ASSERT_EQ(inLayerShapes.size(), inps.size());

        for (int i = 0; i < inps.size(); ++i) {
            bool hasDynamicShapes = inLayerShapes[i].empty();
            if (hasDynamicShapes)
                continue;
            if (inLayerShapes[i].size() == 1) {  // 1D input
                ASSERT_EQ(shape(inLayerShapes[i][0], 1), shape(inps[i]));
            } else {
                // Compare all axes except batch dimension which is variable.
                inLayerShapes[i][0] = inps[i].size[0];
                ASSERT_EQ(inLayerShapes[i], shape(inps[i]));
            }
        }
    }
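
    // testONNXModels: loads "models/<basename>.onnx" plus the matching
    // "data/input_<basename>[_<i>]" / "data/output_<basename>" blobs (.npy or .pb),
    // runs the net on the current backend/target and compares the output with the
    // reference via normAssert; non-zero l1/lInf override the default tolerances.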
    void testONNXModels(const String& basename, const Extension ext = npy,
                        double l1 = 0, double lInf = 0, const bool useSoftmax = false,
                        bool checkNoFallbacks = true, int numInps = 1,
                        bool testShapes = true, bool useWinograd = true)
    {
        String onnxmodel = _tf("models/" + basename + ".onnx", required);
        std::vector<Mat> inps(numInps);
        Mat ref;
        if (ext == npy) {
            for (int i = 0; i < numInps; ++i)
                inps[i] = blobFromNPY(_tf("data/input_" + basename + (numInps > 1 ? format("_%d", i) : "") + ".npy"));
            ref = blobFromNPY(_tf("data/output_" + basename + ".npy"));
        }
        else if (ext == pb) {
            for (int i = 0; i < numInps; ++i)
                inps[i] = readTensorFromONNX(_tf("data/input_" + basename + (numInps > 1 ? format("_%d", i) : "") + ".pb"));
            ref = readTensorFromONNX(_tf("data/output_" + basename + ".pb"));
        }
        else
            CV_Error(Error::StsUnsupportedFormat, "Unsupported extension");

        checkBackend(&inps[0], &ref);
        Net net = readNetFromONNX(onnxmodel);
        ASSERT_FALSE(net.empty());

        if (testShapes)
            testInputShapes(net, inps);

        net.setPreferableBackend(backend);
        net.setPreferableTarget(target);
        net.enableWinograd(useWinograd);

        std::vector<String> inputNames;
        for (int i = 0; i < numInps; ++i)
            inputNames.push_back(format("%d", i));
        net.setInputsNames(inputNames);

        for (int i = 0; i < numInps; ++i)
            net.setInput(inps[i], inputNames[i]);
        Mat out = net.forward("");

        if (useSoftmax)
        {
            LayerParams lp;
            Net netSoftmax;
            netSoftmax.addLayerToPrev("softmaxLayer", "Softmax", lp);
            netSoftmax.setPreferableBackend(DNN_BACKEND_OPENCV);

            netSoftmax.setInput(out);
            out = netSoftmax.forward();

            netSoftmax.setInput(ref);
            ref = netSoftmax.forward();
        }
        if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_OPENCL)
        {
            l1 = std::max(l1, 1.4e-3);
            lInf = std::max(lInf, 8e-3);
        }
        normAssert(ref, out, basename.c_str(), l1 ? l1 : default_l1, lInf ? lInf : default_lInf);
        if (checkNoFallbacks)
            expectNoFallbacksFromIE(net);
    }
};
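
// Typical calls used by the test cases below (a sketch of the calling convention,
// not an exhaustive list; flags and tolerances vary per test):
//   testONNXModels("convolution");                             // defaults: npy data, fallback check on
//   testONNXModels("maxpooling", npy, 0, 0, false, false);     // disable the IE no-fallback check
//   testONNXModels("multy_inputs", npy, 0, 0, false, true, 2); // model with two inputs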

TEST_P(Test_ONNX_layers, InstanceNorm)
{
    if (target == DNN_TARGET_MYRIAD)
        testONNXModels("instancenorm", npy, 0, 0, false, false);
    else
        testONNXModels("instancenorm", npy);
}

TEST_P(Test_ONNX_layers, MaxPooling)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2020020000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
    testONNXModels("maxpooling", npy, 0, 0, false, false);
}
TEST_P(Test_ONNX_layers, MaxPooling_2)
{
    testONNXModels("two_maxpooling", npy, 0, 0, false, false);
}

TEST_P(Test_ONNX_layers, Convolution)
{
    testONNXModels("convolution");
    testONNXModels("conv_asymmetric_pads");
}

TEST_P(Test_ONNX_layers, Convolution_variable_weight)
{
    if ((backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH ||
         backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019) && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);

    if (backend == DNN_BACKEND_CUDA)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_CUDA); // not supported
    if (backend == DNN_BACKEND_VKCOM)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_VULKAN); // not supported

    String basename = "conv_variable_w";
    Net net = readNetFromONNX(_tf("models/" + basename + ".onnx"));
    ASSERT_FALSE(net.empty());

    net.setPreferableBackend(backend);
    net.setPreferableTarget(target);

    for (int i = 0; i < 2; i++)
    {
        Mat input = blobFromNPY(_tf("data/input_" + basename + format("_%d", i) + "_0.npy"));
        Mat weights = blobFromNPY(_tf("data/input_" + basename + format("_%d", i) + "_1.npy"));
        Mat ref = blobFromNPY(_tf("data/output_" + basename + format("_%d", i) + ".npy"));

        net.setInput(input, "0");
        net.setInput(weights, "1");

        Mat out = net.forward();
        normAssert(ref, out, "", default_l1, default_lInf);
    }
}

TEST_P(Test_ONNX_layers, Convolution_variable_weight_bias)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2022010000)
    // openvino/src/plugins/intel_myriad/common/src/ngraph/transformations/extract_dynamic_batch/slice_convolution.cpp:14 Expecting operation v1::GroupConvolution GroupConvolution_6904725 (Reshape_17[0]:f32{1,4,5,5}, Reshape_6904719[0]:f32{4,1,1,2,2}) -> (f32{1,4,4,4}) to have constant kernel, got Reshape_6904719[0]:f32{4,1,1,2,2}
    // openvino\src\plugins\intel_myriad\common\src\ngraph\transformations\extract_dynamic_batch\slice_convolution.cpp:15 Expecting operation v1::GroupConvolution GroupConvolution_6904692 (Reshape_17[0]:f32{1,4,5,5}, Reshape_6904686[0]:f32{4,1,1,2,2}) -> (f32{1,4,4,4}) to have constant kernel, got Reshape_6904686[0]:f32{4,1,1,2,2}
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
    // accuracy (depends on OpenCL version / HW)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16))
        applyTestTag(target == DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_IE_OPENCL : CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16,
            CV_TEST_TAG_DNN_SKIP_IE_NGRAPH, CV_TEST_TAG_DNN_SKIP_IE_VERSION
        );
#elif defined(INF_ENGINE_RELEASE)
    if ((backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH ||
         backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019) && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);

    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_CPU &&
        getInferenceEngineCPUType() == CV_DNN_INFERENCE_ENGINE_CPU_TYPE_ARM_COMPUTE)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_ARM_CPU, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
#endif

    if (backend == DNN_BACKEND_CUDA)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_CUDA); // supports only <= 2 inputs
    if (backend == DNN_BACKEND_VKCOM)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_VULKAN); // not supported

    String basename = "conv_variable_wb";
    Net net = readNetFromONNX(_tf("models/" + basename + ".onnx"));
    ASSERT_FALSE(net.empty());

    net.setPreferableBackend(backend);
    net.setPreferableTarget(target);

    for (int i = 0; i < 2; i++)
    {
        Mat input = blobFromNPY(_tf("data/input_" + basename + format("_%d", i) + "_0.npy"));
        Mat weights = blobFromNPY(_tf("data/input_" + basename + format("_%d", i) + "_1.npy"));
        Mat bias = blobFromNPY(_tf("data/input_" + basename + format("_%d", i) + "_2.npy"));
        Mat ref = blobFromNPY(_tf("data/output_" + basename + format("_%d", i) + ".npy"));

        net.setInput(input, "0");
        net.setInput(weights, "1");
        net.setInput(bias, "bias");

        Mat out = net.forward();
        normAssert(ref, out, "", default_l1, default_lInf);
    }
}

TEST_P(Test_ONNX_layers, Gather)
{
    testONNXModels("gather", npy, 0, 0, false, false);
}
TEST_P(Test_ONNX_layers, Gather_Scalar)
{
    testONNXModels("gather_scalar", npy, 0, 0, false, false);
}
TEST_P(Test_ONNX_layers, GatherMulti)
{
    // GPU plugin unsupported slice for constant
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16))
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
    testONNXModels("gather_multi", npy, 0, 0, false, false);
}

TEST_P(Test_ONNX_layers, Gather_shared_indices) {
    testONNXModels("gather_shared_indices", npy, 0, 0, false, false, 1);
}

TEST_P(Test_ONNX_layers, Two_resizes_with_shared_subgraphs) {
    testONNXModels("two_resizes_with_shared_subgraphs", npy, 0, 0, false, false, 3, /*testShapes*/ false);
}

TEST_P(Test_ONNX_layers, Convolution3D)
{
    if (backend == DNN_BACKEND_CUDA && target == DNN_TARGET_CUDA_FP16)
    {
        // CUDA_FP16: cuDNN did not return a suitable algorithm for convolution.
        applyTestTag(CV_TEST_TAG_DNN_SKIP_CUDA_FP16);
    }
    testONNXModels("conv3d");
}
TEST_P(Test_ONNX_layers, Convolution3D_bias)
{
    if (backend == DNN_BACKEND_CUDA && target == DNN_TARGET_CUDA_FP16)
    {
        // CUDA_FP16: cuDNN did not return a suitable algorithm for convolution.
        applyTestTag(CV_TEST_TAG_DNN_SKIP_CUDA_FP16);
    }
    testONNXModels("conv3d_bias");
    testONNXModels("conv3d_depthwise_bias"); // kernel 1x1
}

TEST_P(Test_ONNX_layers, Two_convolution)
{
#if defined(INF_ENGINE_RELEASE)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD
        && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X
    )
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
#endif
    // Reference output values are in range [-0.855, 0.611]
    testONNXModels("two_convolution");
}

TEST_P(Test_ONNX_layers, Deconvolution)
{
    testONNXModels("deconvolution", npy, 0, 0, false, false);
    testONNXModels("two_deconvolution", npy, 0, 0, false, false);
    testONNXModels("deconvolution_group", npy, 0, 0, false, false);
    testONNXModels("deconvolution_output_shape", npy, 0, 0, false, false);
    if (target != DNN_TARGET_CUDA_FP16) // bug
        testONNXModels("deconv_adjpad_2d", npy, 0, 0, false, false);
}

TEST_P(Test_ONNX_layers, Deconvolution3D)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2022010000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
    {
        // [ GENERAL_ERROR ] openvino/src/plugins/intel_myriad/graph_transformer/src/frontend/frontend.cpp:592 Failed to compile layer "2":
        // [ GENERAL_ERROR ] openvino/src/plugins/intel_myriad/graph_transformer/src/model/model.cpp:198 duplicateData error: while duplicating 2@weights Const data got different desc and content byte sizes (162 and 486 respectively)
        if (target == DNN_TARGET_MYRIAD)
            applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
    }
#elif defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2021040000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
    {
        // [ GENERAL_ERROR ] vpu/graph_transformer/src/frontend/frontend.cpp:439 Failed to compile layer "2":
        // [ GENERAL_ERROR ] vpu/graph_transformer/src/model/model.cpp:198 duplicateData error: while duplicating 2@weights Const data got different desc and content byte sizes (162 and 486 respectively)
        if (target == DNN_TARGET_MYRIAD)
            applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
    }
#endif
    if (backend == DNN_BACKEND_OPENCV)
        throw SkipTestException("OpenCV backend is not supported"); // FIXIT use tags
    if (backend == DNN_BACKEND_VKCOM)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_VULKAN);
    testONNXModels("deconv3d");
}
TEST_P(Test_ONNX_layers, Deconvolution3D_bias)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2022010000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
    {
        // [ GENERAL_ERROR ] openvino/src/plugins/intel_myriad/graph_transformer/src/frontend/frontend.cpp:592 Failed to compile layer "3":
        // [ GENERAL_ERROR ] openvino/src/plugins/intel_myriad/graph_transformer/src/model/model.cpp:198 duplicateData error: while duplicating 3@weights Const data got different desc and content byte sizes (270 and 810 respectively)
        if (target == DNN_TARGET_MYRIAD)
            applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
    }
#elif defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2021040000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
    {
        // [ GENERAL_ERROR ] vpu/graph_transformer/src/frontend/frontend.cpp:439 Failed to compile layer "2":
        // [ GENERAL_ERROR ] vpu/graph_transformer/src/model/model.cpp:198 duplicateData error: while duplicating 2@weights Const data got different desc and content byte sizes (162 and 486 respectively)
        if (target == DNN_TARGET_MYRIAD)
            applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
    }
#endif
    if (backend == DNN_BACKEND_OPENCV)
        throw SkipTestException("OpenCV backend is not supported"); // FIXIT use tags
    if (backend == DNN_BACKEND_VKCOM)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_VULKAN);
    testONNXModels("deconv3d_bias");
}
TEST_P(Test_ONNX_layers, Deconvolution3D_pad)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2022010000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
    {
        // [ GENERAL_ERROR ] openvino/src/plugins/intel_myriad/graph_transformer/src/frontend/frontend.cpp:592 Failed to compile layer "3":
        // [ GENERAL_ERROR ] openvino/src/plugins/intel_myriad/graph_transformer/src/model/model.cpp:198 duplicateData error: while duplicating 3@weights Const data got different desc and content byte sizes (108 and 432 respectively)
        if (target == DNN_TARGET_MYRIAD)
            applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
    }
#elif defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2021040000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
    {
        // [ GENERAL_ERROR ] vpu/graph_transformer/src/frontend/frontend.cpp:439 Failed to compile layer "2":
        // [ GENERAL_ERROR ] vpu/graph_transformer/src/model/model.cpp:198 duplicateData error: while duplicating 2@weights Const data got different desc and content byte sizes (162 and 486 respectively)
        if (target == DNN_TARGET_MYRIAD)
            applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
    }
#endif
    if (backend == DNN_BACKEND_OPENCV)
        throw SkipTestException("OpenCV backend is not supported"); // FIXIT use tags
    if (backend == DNN_BACKEND_VKCOM)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_VULKAN);
    testONNXModels("deconv3d_pad");
}
TEST_P(Test_ONNX_layers, Deconvolution3D_adjpad)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2022010000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
    {
        // [ GENERAL_ERROR ] openvino/src/plugins/intel_myriad/graph_transformer/src/frontend/frontend.cpp:592 Failed to compile layer "3":
        // [ GENERAL_ERROR ] openvino/src/plugins/intel_myriad/graph_transformer/src/model/model.cpp:198 duplicateData error: while duplicating 3@weights Const data got different desc and content byte sizes (90 and 180 respectively)
        if (target == DNN_TARGET_MYRIAD)
            applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
    }
#elif defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2021040000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
    {
        // [ GENERAL_ERROR ] vpu/graph_transformer/src/frontend/frontend.cpp:439 Failed to compile layer "2":
        // [ GENERAL_ERROR ] vpu/graph_transformer/src/model/model.cpp:198 duplicateData error: while duplicating 2@weights Const data got different desc and content byte sizes (162 and 486 respectively)
        if (target == DNN_TARGET_MYRIAD)
            applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
    }
#endif
    if (backend == DNN_BACKEND_OPENCV)
        throw SkipTestException("OpenCV backend is not supported"); // FIXIT use tags
    if (backend == DNN_BACKEND_VKCOM)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_VULKAN);
    testONNXModels("deconv3d_adjpad");
}

TEST_P(Test_ONNX_layers, Dropout)
{
    testONNXModels("dropout");
}

TEST_P(Test_ONNX_layers, Linear)
{
    if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
    testONNXModels("linear");
}

TEST_P(Test_ONNX_layers, ReLU)
{
    testONNXModels("ReLU");
}

TEST_P(Test_ONNX_layers, PReLU)
{
    testONNXModels("PReLU_slope");
}

TEST_P(Test_ONNX_layers, Clip)
{
    testONNXModels("clip", npy);
}

TEST_P(Test_ONNX_layers, Clip_init)
{
    testONNXModels("clip_init_min_max");
    testONNXModels("clip_init_min");
    testONNXModels("clip_init_max");
}

TEST_P(Test_ONNX_layers, Shape)
{
    testONNXModels("shape_of_constant");
}

TEST_P(Test_ONNX_layers, ReduceMean)
{
    testONNXModels("reduce_mean");
    testONNXModels("reduce_mean_axis1");
    testONNXModels("reduce_mean_axis2");
}

TEST_P(Test_ONNX_layers, ReduceSum)
{
    testONNXModels("reduce_sum");
    testONNXModels("reduce_sum_axis_dynamic_batch");
}

TEST_P(Test_ONNX_layers, ReduceMax)
{
    testONNXModels("reduce_max");
}
TEST_P(Test_ONNX_layers, ReduceMax_axis_0)
{
    testONNXModels("reduce_max_axis_0");
}
TEST_P(Test_ONNX_layers, ReduceMax_axis_1)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2021040000)
    // [ GENERAL_ERROR ] AssertionFailed: !out.networkInputs.empty()
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
    testONNXModels("reduce_max_axis_1");
}

TEST_P(Test_ONNX_layers, Min)
{
    testONNXModels("min", npy, 0, 0, false, true, 2);
}

TEST_P(Test_ONNX_layers, ArgLayer)
{
    if (backend != DNN_BACKEND_OPENCV || target != DNN_TARGET_CPU)
        throw SkipTestException("Only CPU is supported"); // FIXIT use tags

    testONNXModels("argmax");
    testONNXModels("argmin");
}

TEST_P(Test_ONNX_layers, Scale)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2022010000)
    // accuracy (inf/nan)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#elif defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2021040000)
    // accuracy
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
    // IE exception: mkldnn_node.cpp:238 Ngraph operation Reshape with name ReduceMean_0 has dynamic output shape on 0 port, but CPU plug-in supports only static shape
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16))
        applyTestTag(target == DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_IE_OPENCL : CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16,
            CV_TEST_TAG_DNN_SKIP_IE_NGRAPH, CV_TEST_TAG_DNN_SKIP_IE_VERSION
        );
#elif defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2021040000)
    // Ngraph operation Reshape with name ReduceMean_0 has dynamic output shape on 0 port, but CPU plug-in supports only static shape
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_OPENCL)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_OPENCL_FP16)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
    testONNXModels("scale");
}
TEST_P(Test_ONNX_layers, Scale_broadcast)
{
    if (backend == DNN_BACKEND_CUDA)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_CUDA); // doesn't support broadcasting
    testONNXModels("scale_broadcast", npy, 0, 0, false, true, 3);
}
TEST_P(Test_ONNX_layers, Scale_broadcast_mid)
{
    if (backend == DNN_BACKEND_CUDA)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_CUDA); // doesn't support broadcasting
    testONNXModels("scale_broadcast_mid", npy, 0, 0, false, true, 2);
}

TEST_P(Test_ONNX_layers, ReduceMean3D)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2021040000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER); // Only CPU on DLIE backend is supported
    else if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH); // Only CPU on DLIE backend is supported
#endif
    if (backend == DNN_BACKEND_OPENCV && target != DNN_TARGET_CPU)
        throw SkipTestException("Only CPU is supported"); // FIXIT use tags

    if (backend == DNN_BACKEND_VKCOM)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_VULKAN);

    testONNXModels("reduce_mean3d");
}

TEST_P(Test_ONNX_layers, MaxPooling_Sigmoid)
{
    testONNXModels("maxpooling_sigmoid");
}

TEST_P(Test_ONNX_layers, Cast)
{
    testONNXModels("cast");
}

TEST_P(Test_ONNX_layers, Power)
{
    testONNXModels("pow2", npy, 0, 0, false, false);
}

TEST_P(Test_ONNX_layers, Exp)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    testONNXModels("exp");
}

TEST_P(Test_ONNX_layers, Elementwise_Ceil)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2021040000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
#endif
    testONNXModels("ceil");
}
TEST_P(Test_ONNX_layers, Elementwise_Floor)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2021040000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
#endif
    testONNXModels("floor");
}
TEST_P(Test_ONNX_layers, Elementwise_Log)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2021040000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
#endif
    testONNXModels("log");
}
TEST_P(Test_ONNX_layers, Elementwise_Round)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2021040000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
#endif
    testONNXModels("round");
}
TEST_P(Test_ONNX_layers, Elementwise_Sqrt)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2021040000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
#endif
    testONNXModels("sqrt");
}
TEST_P(Test_ONNX_layers, Elementwise_not)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2021040000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
#endif
    testONNXModels("not");
}

TEST_P(Test_ONNX_layers, Compare_EQ)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2021040000)
    // IE exception: Function contains several inputs and outputs with one friendly name!
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16))
        applyTestTag(target == DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_IE_OPENCL : CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16,
            CV_TEST_TAG_DNN_SKIP_IE_NGRAPH, CV_TEST_TAG_DNN_SKIP_IE_VERSION
        );
    // IE exception: Function contains several inputs and outputs with one friendly name!
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#elif defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2021040000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
#endif

    testONNXModels("equal");
}
TEST_P(Test_ONNX_layers, Compare_GT)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2021040000)
    // IE exception: Function contains several inputs and outputs with one friendly name!
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16))
        applyTestTag(target == DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_IE_OPENCL : CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16,
            CV_TEST_TAG_DNN_SKIP_IE_NGRAPH, CV_TEST_TAG_DNN_SKIP_IE_VERSION
        );
    // IE exception: Function contains several inputs and outputs with one friendly name!
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#elif defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2021040000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
#endif

    testONNXModels("greater");
}

TEST_P(Test_ONNX_layers, Greater_input_dtype_int64) {
    testONNXModels("greater_input_dtype_int64");
}

TEST_P(Test_ONNX_layers, Compare_LT)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2021040000)
    // IE exception: Function contains several inputs and outputs with one friendly name!
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16))
        applyTestTag(target == DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_IE_OPENCL : CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16,
            CV_TEST_TAG_DNN_SKIP_IE_NGRAPH, CV_TEST_TAG_DNN_SKIP_IE_VERSION
        );
    // IE exception: Function contains several inputs and outputs with one friendly name!
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#elif defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2021040000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
#endif

    testONNXModels("less");
}

TEST_P(Test_ONNX_layers, Compare_GTorEQ)
{
    testONNXModels("greater_or_equal");
}
TEST_P(Test_ONNX_layers, Compare_LEorEQ)
{
    testONNXModels("less_or_equal");
}

TEST_P(Test_ONNX_layers, CompareSameDims_EQ)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2021040000)
    // IE exception: Function contains several inputs and outputs with one friendly name!
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16))
        applyTestTag(target == DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_IE_OPENCL : CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16,
            CV_TEST_TAG_DNN_SKIP_IE_NGRAPH, CV_TEST_TAG_DNN_SKIP_IE_VERSION
        );
    // IE exception: Function contains several inputs and outputs with one friendly name!
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#elif defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2021040000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
#endif

    testONNXModels("equal_same_dims", npy, 0, 0, false, true, 2);
}
TEST_P(Test_ONNX_layers, CompareSameDims_GT)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2021040000)
    // IE exception: Function contains several inputs and outputs with one friendly name!
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16))
        applyTestTag(target == DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_IE_OPENCL : CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16,
            CV_TEST_TAG_DNN_SKIP_IE_NGRAPH, CV_TEST_TAG_DNN_SKIP_IE_VERSION
        );
    // IE exception: Function contains several inputs and outputs with one friendly name!
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#elif defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2021040000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
#endif

    testONNXModels("greater_same_dims", npy, 0, 0, false, true, 2);
}
TEST_P(Test_ONNX_layers, CompareSameDims_LT)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2021040000)
    // IE exception: Function contains several inputs and outputs with one friendly name!
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16))
        applyTestTag(target == DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_IE_OPENCL : CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16,
            CV_TEST_TAG_DNN_SKIP_IE_NGRAPH, CV_TEST_TAG_DNN_SKIP_IE_VERSION
        );
    // IE exception: Function contains several inputs and outputs with one friendly name!
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#elif defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2021040000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
#endif

    testONNXModels("less_same_dims", npy, 0, 0, false, true, 2);
}

TEST_P(Test_ONNX_layers, Concatenation)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
    {
        if (target == DNN_TARGET_OPENCL_FP16) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_OPENCL)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_MYRIAD)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    }
    testONNXModels("concatenation");
    testONNXModels("concat_const_blobs");
}

TEST_P(Test_ONNX_layers, CumSumExclusiveInplace)
{
    testONNXModels("cumsum_exclusive_inplace");
}

TEST_P(Test_ONNX_layers, Range)
{
    testONNXModels("range_float");
    testONNXModels("range_float_negative");
}

TEST_P(Test_ONNX_layers, Eltwise3D)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2021040000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER); // Only CPU on DLIE backend is supported
    else if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH); // Only CPU on DLIE backend is supported
#endif
    testONNXModels("eltwise3d");
}

TEST_P(Test_ONNX_layers, AveragePooling)
{
    testONNXModels("average_pooling");
}

TEST_P(Test_ONNX_layers, MaxPooling3D)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2022010000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
    {
        // accuracy
        if (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16)
            applyTestTag(target == DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_IE_OPENCL : CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16,
                CV_TEST_TAG_DNN_SKIP_IE_NGRAPH, CV_TEST_TAG_DNN_SKIP_IE_VERSION
            );
        // IE exception: [ GENERAL_ERROR ] AssertionFailed: !expired()
        if (target == DNN_TARGET_MYRIAD)
            applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
    }
#elif defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2021040000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
    {
        // accuracy
        if (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16)
            applyTestTag(target == DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_IE_OPENCL : CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16,
                CV_TEST_TAG_DNN_SKIP_IE_NGRAPH, CV_TEST_TAG_DNN_SKIP_IE_VERSION
            );
        // IE exception: [ GENERAL_ERROR ] AssertionFailed: !expired()
        if (target == DNN_TARGET_MYRIAD)
            applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
    }
#endif
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2021040000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER); // Only CPU on DLIE backend is supported
    else if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH); // Only CPU on DLIE backend is supported
#endif
    if (backend == DNN_BACKEND_OPENCV && target != DNN_TARGET_CPU)
        throw SkipTestException("Only CPU is supported"); // FIXIT use tags
    if (backend == DNN_BACKEND_VKCOM)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_VULKAN);
    testONNXModels("max_pool3d", npy, 0, 0, false, false);
}
TEST_P(Test_ONNX_layers, AvePooling3D)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2021040000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER); // Only CPU on DLIE backend is supported
    else if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH); // Only CPU on DLIE backend is supported
#endif
    if (backend == DNN_BACKEND_OPENCV && target != DNN_TARGET_CPU)
        throw SkipTestException("Only CPU is supported"); // FIXIT use tags
    if (backend == DNN_BACKEND_VKCOM)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_VULKAN);
    testONNXModels("ave_pool3d");
}

TEST_P(Test_ONNX_layers, PoolConv3D)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2021040000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER); // Only CPU on DLIE backend is supported
    else if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH); // Only CPU on DLIE backend is supported
#endif
    if (backend == DNN_BACKEND_OPENCV && target != DNN_TARGET_CPU)
        throw SkipTestException("Only CPU is supported"); // FIXIT use tags
    if (backend == DNN_BACKEND_VKCOM)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_VULKAN);
    if (backend == DNN_BACKEND_CUDA && target == DNN_TARGET_CUDA_FP16)
    {
        // CUDA_FP16: cuDNN did not return a suitable algorithm for convolution.
        applyTestTag(CV_TEST_TAG_DNN_SKIP_CUDA_FP16);
    }
    testONNXModels("pool_conv_3d");
}

TEST_P(Test_ONNX_layers, BatchNormalization)
{
    testONNXModels("batch_norm");
}

TEST_P(Test_ONNX_layers, BatchNormalization3D)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
    {
        if (target == DNN_TARGET_OPENCL_FP16) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_OPENCL)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_MYRIAD)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    }
    testONNXModels("batch_norm_3d");
}

TEST_P(Test_ONNX_layers, BatchNormalizationUnfused)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2021030000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_CPU, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH); // exception
#endif
    testONNXModels("frozenBatchNorm2d");
}

TEST_P(Test_ONNX_layers, BatchNormalizationSubgraph)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2021030000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_CPU, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH); // exception
#endif
    testONNXModels("batch_norm_subgraph");
}

TEST_P(Test_ONNX_layers, NormalizeFusionSubgraph)
{
    testONNXModels("normalize_fusion");
}

TEST_P(Test_ONNX_layers, Transpose)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
    {
        if (target == DNN_TARGET_OPENCL_FP16) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_OPENCL)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_MYRIAD)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    }
    testONNXModels("transpose");
}

TEST_P(Test_ONNX_layers, Multiplication)
{
    if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    testONNXModels("mul");
}

TEST_P(Test_ONNX_layers, MatMul_2d)
{
    testONNXModels("matmul_2d");
}
TEST_P(Test_ONNX_layers, MatMul_3d)
{
    testONNXModels("matmul_3d");
}
TEST_P(Test_ONNX_layers, MatMul_4d)
{
    testONNXModels("matmul_4d");
}

TEST_P(Test_ONNX_layers, MatMul_2d_init)
{
    testONNXModels("matmul_2d_init");
}
TEST_P(Test_ONNX_layers, MatMul_3d_init)
{
    testONNXModels("matmul_3d_init");
}
TEST_P(Test_ONNX_layers, MatMul_4d_init)
{
    testONNXModels("matmul_4d_init");
}
TEST_P(Test_ONNX_layers, MatMul_init_2)
{
    testONNXModels("matmul_init_2");
}
TEST_P(Test_ONNX_layers, MatMul_init_bcast)
{
    testONNXModels("matmul_init_bcast");
}

TEST_P(Test_ONNX_layers, MatMul_bcast_3dx2d) {
    testONNXModels("matmul_bcast");
}

TEST_P(Test_ONNX_layers, MatMulAdd)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2022010000)
    // accuracy
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_CPU, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#elif defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2021010000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
#endif
    if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
    testONNXModels("matmul_add");
}

TEST_P(Test_ONNX_layers, Expand)
{
    testONNXModels("expand");
}
TEST_P(Test_ONNX_layers, ExpandIdentity) {
    testONNXModels("expand_identity");
}
TEST_P(Test_ONNX_layers, ExpandBatch) {
    testONNXModels("expand_batch");
}
TEST_P(Test_ONNX_layers, ExpandChannels) {
    testONNXModels("expand_channels");
}
TEST_P(Test_ONNX_layers, ExpandNegBatch) {
    testONNXModels("expand_neg_batch");
}
TEST_P(Test_ONNX_layers, ExpandHW)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    testONNXModels("expand_hw");
}

TEST_P(Test_ONNX_layers, Constant)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2018050000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD
        && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
    testONNXModels("constant");
}

TEST_P(Test_ONNX_layers, Padding)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2019010000)
    testONNXModels("padding", npy, 0, 0, false, false);
#else
    testONNXModels("padding");
#endif
}

TEST_P(Test_ONNX_layers, Resize)
{
    testONNXModels("resize_nearest");
    testONNXModels("tf_half_pixel_for_nn");
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    testONNXModels("resize_bilinear");
}

TEST_P(Test_ONNX_layers, ResizeUnfused)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    testONNXModels("upsample_unfused_torch1.2");
    testONNXModels("upsample_unfused_opset9_torch1.4");
    testONNXModels("resize_nearest_unfused_opset11_torch1.4");
    testONNXModels("resize_nearest_unfused_opset11_torch1.3");
    testONNXModels("resize_bilinear_unfused_opset11_torch1.4");
}

TEST_P(Test_ONNX_layers, ResizeUnfusedTwoInputs)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2023000000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
#endif
    testONNXModels("upsample_unfused_two_inputs_opset9_torch1.4", npy, 0, 0, false, true, 2);
    testONNXModels("upsample_unfused_two_inputs_opset11_torch1.4", npy, 0, 0, false, true, 2);
}

TEST_P(Test_ONNX_layers, MultyInputs)
{
    testONNXModels("multy_inputs", npy, 0, 0, false, true, 2);
}

TEST_P(Test_ONNX_layers, Broadcast)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    testONNXModels("channel_broadcast", npy, 0, 0, false, true, 2);
}

TEST_P(Test_ONNX_layers, DynamicResize)
{
    testONNXModels("dynamic_resize_9", npy, 0, 0, false, true, 2);
    testONNXModels("dynamic_resize_10", npy, 0, 0, false, true, 2);
    testONNXModels("dynamic_resize_11", npy, 0, 0, false, true, 2);
    testONNXModels("dynamic_resize_13", npy, 0, 0, false, true, 2);
    testONNXModels("dynamic_resize_scale_9", npy, 0, 0, false, true, 2);
    testONNXModels("dynamic_resize_scale_10", npy, 0, 0, false, true, 2);
    testONNXModels("dynamic_resize_scale_11", npy, 0, 0, false, true, 2);
    testONNXModels("dynamic_resize_scale_13", npy, 0, 0, false, true, 2);

    testONNXModels("resize_size_opset11");
    testONNXModels("resize_size_opset13");
}

TEST_P(Test_ONNX_layers, Resize_HumanSeg)
{
    testONNXModels("resize_humanseg");
}
2019-11-09 19:11:09 +08:00
TEST_P ( Test_ONNX_layers , Div )
{
const String model = _tf ( " models/div.onnx " ) ;
Net net = readNetFromONNX ( model ) ;
ASSERT_FALSE ( net . empty ( ) ) ;
net . setPreferableBackend ( backend ) ;
net . setPreferableTarget ( target ) ;
2019-12-24 18:34:33 +08:00
// Reference output values range is -68.80928, 2.991873. So to avoid computational
// difference for FP16 we'll perform reversed division (just swap inputs).
Mat inp1 = blobFromNPY ( _tf ( " data/input_div_1.npy " ) ) ;
Mat inp2 = blobFromNPY ( _tf ( " data/input_div_0.npy " ) ) ;
2019-11-09 19:11:09 +08:00
Mat ref = blobFromNPY ( _tf ( " data/output_div.npy " ) ) ;
2019-12-24 18:34:33 +08:00
cv : : divide ( 1.0 , ref , ref ) ;
2019-11-09 19:11:09 +08:00
checkBackend ( & inp1 , & ref ) ;
net . setInput ( inp1 , " 0 " ) ;
net . setInput ( inp2 , " 1 " ) ;
Mat out = net . forward ( ) ;
normAssert ( ref , out , " " , default_l1 , default_lInf ) ;
2022-07-13 11:25:27 +08:00
2022-10-18 04:00:12 +08:00
// NaryEltwise layer supports only CPU for now
testONNXModels ( " div_test_1x1 " , npy , 0 , 0 , false , false , 2 ) ;
2019-11-09 19:11:09 +08:00
}
2018-12-12 22:36:17 +08:00
TEST_P ( Test_ONNX_layers , DynamicReshape )
{
2019-12-02 21:16:06 +08:00
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 )
2020-03-17 22:31:01 +08:00
applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER ) ;
2018-12-12 22:36:17 +08:00
testONNXModels ( " dynamic_reshape " ) ;
2020-03-04 16:27:10 +08:00
testONNXModels ( " dynamic_reshape_opset_11 " ) ;
testONNXModels ( " flatten_by_prod " ) ;
2020-03-14 19:05:49 +08:00
testONNXModels ( " flatten_const " ) ;
2018-12-12 22:36:17 +08:00
}
2018-09-11 02:07:51 +08:00
2019-02-22 00:48:46 +08:00
TEST_P ( Test_ONNX_layers , Reshape )
{
testONNXModels ( " unsqueeze " ) ;
2021-09-17 20:10:57 +08:00
testONNXModels ( " unsqueeze_opset_13 " ) ;
2019-02-22 00:48:46 +08:00
}
2022-10-14 16:46:25 +08:00
TEST_P ( Test_ONNX_layers , Unsqueeze_Neg_Axes )
{
testONNXModels ( " unsqueeze_neg_axes " ) ;
}
2019-11-09 19:11:09 +08:00
TEST_P ( Test_ONNX_layers , Squeeze )
{
2020-03-22 21:04:30 +08:00
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 & & target = = DNN_TARGET_MYRIAD )
applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_MYRIAD , CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER ) ;
2019-11-09 19:11:09 +08:00
testONNXModels ( " squeeze " ) ;
2022-08-30 09:50:29 +08:00
testONNXModels ( " squeeze_axes_op13 " ) ;
2019-11-09 19:11:09 +08:00
}
TEST_P ( Test_ONNX_layers , ReduceL2 )
{
testONNXModels ( " reduceL2 " ) ;
2020-04-07 22:12:18 +08:00
testONNXModels ( " reduceL2_subgraph " ) ;
2020-03-28 23:53:57 +08:00
testONNXModels ( " reduceL2_subgraph_2 " ) ;
2021-09-09 19:41:40 +08:00
testONNXModels ( " reduceL2_subgraph2_2 " ) ;
2019-11-09 19:11:09 +08:00
}
2020-02-15 19:12:20 +08:00
TEST_P ( Test_ONNX_layers , Split )
{
2023-10-05 17:51:55 +08:00
# if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2023000000)
2020-02-15 19:12:20 +08:00
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 )
applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER ) ;
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH )
applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_NGRAPH ) ;
2023-10-05 17:51:55 +08:00
# endif
2023-06-07 21:01:42 +08:00
testONNXModels ( " split_0 " ) ;
2020-02-15 19:12:20 +08:00
testONNXModels ( " split_1 " ) ;
testONNXModels ( " split_2 " ) ;
testONNXModels ( " split_3 " ) ;
testONNXModels ( " split_4 " ) ;
2023-04-11 16:18:50 +08:00
testONNXModels ( " split_5 " ) ;
testONNXModels ( " split_6 " ) ;
2021-09-09 19:41:40 +08:00
testONNXModels ( " split_neg_axis " ) ;
2020-02-15 19:12:20 +08:00
}
2022-10-18 04:00:12 +08:00
// Mul with a 0-d tensor inside: the output should be A x 1, but is 1 x A. See PR #22652
TEST_P ( Test_ONNX_layers , DISABLED_Split_sizes_0d )
{
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 )
applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER ) ;
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH )
applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_NGRAPH ) ;
testONNXModels ( " split_sizes " ) ;
}
2019-06-08 21:52:40 +08:00
TEST_P ( Test_ONNX_layers , Slice )
{
2019-06-14 23:17:02 +08:00
# if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2019010000)
testONNXModels ( " slice " , npy , 0 , 0 , false , false ) ;
# else
2019-06-08 21:52:40 +08:00
testONNXModels ( " slice " ) ;
2021-09-09 19:41:40 +08:00
testONNXModels ( " slice_neg_starts " ) ;
2020-03-17 22:31:01 +08:00
testONNXModels ( " slice_opset_11 " ) ;
2022-12-01 17:41:54 +08:00
testONNXModels ( " slice_neg_steps " , pb ) ;
2019-06-14 23:17:02 +08:00
# endif
2019-06-08 21:52:40 +08:00
}
2021-03-26 19:04:57 +08:00
TEST_P ( Test_ONNX_layers , Slice_Steps_2DInput )
{
testONNXModels ( " slice_opset_11_steps_2d " ) ;
}
TEST_P ( Test_ONNX_layers , Slice_Steps_3DInput )
{
testONNXModels ( " slice_opset_11_steps_3d " ) ;
}
TEST_P ( Test_ONNX_layers , Slice_Steps_4DInput )
{
testONNXModels ( " slice_opset_11_steps_4d " ) ;
}
TEST_P ( Test_ONNX_layers , Slice_Steps_5DInput )
{
testONNXModels ( " slice_opset_11_steps_5d " ) ;
}
2022-09-22 14:40:39 +08:00
TEST_P ( Test_ONNX_layers , Slice_Nonseq_Axes )
{
testONNXModels ( " slice_nonseq_axes " ) ;
testONNXModels ( " slice_nonseq_axes_steps " ) ;
testONNXModels ( " slice_nonseq_miss_axes_steps " ) ;
}
TEST_P ( Test_ONNX_layers , Slice_Neg_Axes )
{
testONNXModels ( " slice_neg_axes " ) ;
testONNXModels ( " slice_neg_axes_steps " ) ;
testONNXModels ( " slice_neg_miss_axes_steps " ) ;
}
2019-05-20 15:46:09 +08:00
TEST_P ( Test_ONNX_layers , Softmax )
{
testONNXModels ( " softmax " ) ;
2019-04-30 20:33:32 +08:00
testONNXModels ( " log_softmax " , npy , 0 , 0 , false , false ) ;
2020-01-06 19:03:05 +08:00
testONNXModels ( " softmax_unfused " ) ;
2019-05-20 15:46:09 +08:00
}
2019-07-28 03:10:13 +08:00
TEST_P ( Test_ONNX_layers , Split_EltwiseMax )
{
2023-10-05 17:51:55 +08:00
# if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2023000000)
2019-12-02 21:16:06 +08:00
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 )
applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER ) ;
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH )
applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_NGRAPH ) ;
2023-10-05 17:51:55 +08:00
# endif
2019-07-28 03:10:13 +08:00
testONNXModels ( " split_max " ) ;
}
2021-08-13 20:41:00 +08:00
TEST_P ( Test_ONNX_layers , LSTM_Activations )
{
2022-03-31 03:03:38 +08:00
# if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2022010000)
// IE exception: Node Block1326/lstm/reshape_0/permute was not assigned on any pointed device
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH & & ( target = = DNN_TARGET_OPENCL | | target = = DNN_TARGET_OPENCL_FP16 ) )
applyTestTag ( target = = DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_IE_OPENCL : CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16 ,
CV_TEST_TAG_DNN_SKIP_IE_NGRAPH , CV_TEST_TAG_DNN_SKIP_IE_VERSION
) ;
# elif defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2021040000)
2021-11-26 03:56:27 +08:00
// IE Exception: Ngraph operation Reshape with name Block1237_Output_0_before_reshape has dynamic output shape on 0 port, but CPU plug-in supports only static shape
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH & & ( target = = DNN_TARGET_OPENCL | | target = = DNN_TARGET_OPENCL_FP16 ) )
applyTestTag ( target = = DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_IE_OPENCL : CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16 ,
CV_TEST_TAG_DNN_SKIP_IE_NGRAPH , CV_TEST_TAG_DNN_SKIP_IE_VERSION
) ;
# endif
2021-08-13 20:41:00 +08:00
testONNXModels ( " lstm_cntk_tanh " , pb , 0 , 0 , false , false ) ;
}
2022-03-15 14:14:05 +08:00
// disabled due to poor handling of 1-d mats
TEST_P ( Test_ONNX_layers , DISABLED_LSTM )
2020-03-16 03:33:05 +08:00
{
2020-03-22 21:04:30 +08:00
testONNXModels ( " lstm " , npy , 0 , 0 , false , false ) ;
2020-03-16 03:33:05 +08:00
}
2022-03-15 14:14:05 +08:00
// disabled due to poor handling of 1-d mats
TEST_P ( Test_ONNX_layers , DISABLED_LSTM_bidirectional )
2020-03-22 05:20:36 +08:00
{
2020-03-22 21:04:30 +08:00
testONNXModels ( " lstm_bidirectional " , npy , 0 , 0 , false , false ) ;
2020-03-22 05:20:36 +08:00
}
2021-07-23 22:11:50 +08:00
TEST_P ( Test_ONNX_layers , LSTM_hidden )
{
testONNXModels ( " hidden_lstm " , npy , 0 , 0 , false , false ) ;
}
TEST_P ( Test_ONNX_layers , LSTM_hidden_bidirectional )
{
2022-03-31 03:03:38 +08:00
# if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2022010000)
// IE exception: Node Transpose_45 was not assigned on any pointed device.
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH & & ( target = = DNN_TARGET_OPENCL | | target = = DNN_TARGET_OPENCL_FP16 ) )
applyTestTag ( target = = DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_IE_OPENCL : CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16 ,
CV_TEST_TAG_DNN_SKIP_IE_NGRAPH , CV_TEST_TAG_DNN_SKIP_IE_VERSION
) ;
# endif
2021-07-23 22:11:50 +08:00
testONNXModels ( " hidden_lstm_bi " , npy , 0 , 0 , false , false ) ;
}
2021-08-07 15:07:37 +08:00
TEST_P ( Test_ONNX_layers , GRU )
{
2022-04-05 06:02:40 +08:00
# if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2022010000)
// IE exception: Node GRU_22 was not assigned on any pointed device
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH & & ( target = = DNN_TARGET_OPENCL | | target = = DNN_TARGET_OPENCL_FP16 ) )
applyTestTag ( target = = DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_IE_OPENCL : CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16 ,
CV_TEST_TAG_DNN_SKIP_IE_NGRAPH , CV_TEST_TAG_DNN_SKIP_IE_VERSION
) ;
# endif
2021-08-07 15:07:37 +08:00
testONNXModels ( " gru " , npy , 0 , 0 , false , false ) ;
}
2023-04-20 15:11:33 +08:00
TEST_P ( Test_ONNX_layers , gru_cell_batchsize_50_seqlen_1 )
{
# if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2022010000)
// IE exception: Node GRU_22 was not assigned on any pointed device
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH & & ( target = = DNN_TARGET_OPENCL | | target = = DNN_TARGET_OPENCL_FP16 ) )
applyTestTag ( target = = DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_IE_OPENCL : CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16 ,
CV_TEST_TAG_DNN_SKIP_IE_NGRAPH , CV_TEST_TAG_DNN_SKIP_IE_VERSION
) ;
# endif
if ( backend = = DNN_BACKEND_CUDA )
applyTestTag ( CV_TEST_TAG_DNN_SKIP_CUDA ) ;
testONNXModels ( " gru_cell_batchsize_50_seqlen_1 " , npy , 0 , 0 , false , false ) ;
}
TEST_P ( Test_ONNX_layers , gru_cell_batchsize_5_seqlen_5 )
{
# if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2022010000)
// IE exception: Node GRU_22 was not assigned on any pointed device
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH & & ( target = = DNN_TARGET_OPENCL | | target = = DNN_TARGET_OPENCL_FP16 ) )
applyTestTag ( target = = DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_IE_OPENCL : CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16 ,
CV_TEST_TAG_DNN_SKIP_IE_NGRAPH , CV_TEST_TAG_DNN_SKIP_IE_VERSION
) ;
# endif
if ( backend = = DNN_BACKEND_CUDA )
applyTestTag ( CV_TEST_TAG_DNN_SKIP_CUDA ) ;
testONNXModels ( " gru_cell_batchsize_5_seqlen_5 " , npy , 0 , 0 , false , false ) ;
}
TEST_P ( Test_ONNX_layers , gru_cell_batchsize_1_seqlen_50 )
{
# if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2022010000)
// IE exception: Node GRU_22 was not assigned on any pointed device
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH & & ( target = = DNN_TARGET_OPENCL | | target = = DNN_TARGET_OPENCL_FP16 ) )
applyTestTag ( target = = DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_IE_OPENCL : CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16 ,
CV_TEST_TAG_DNN_SKIP_IE_NGRAPH , CV_TEST_TAG_DNN_SKIP_IE_VERSION
) ;
# endif
if ( backend = = DNN_BACKEND_CUDA )
applyTestTag ( CV_TEST_TAG_DNN_SKIP_CUDA ) ;
testONNXModels ( " gru_cell_batchsize_1_seqlen_50 " , npy , 0 , 0 , false , false ) ;
}
2021-08-07 15:07:37 +08:00
TEST_P ( Test_ONNX_layers , GRU_bidirectional )
{
testONNXModels ( " gru_bi " , npy , 0 , 0 , false , false ) ;
}
2022-03-30 18:47:59 +08:00
TEST_P ( Test_ONNX_layers , LSTM_cell_forward )
2022-03-15 14:14:05 +08:00
{
2022-03-31 03:03:38 +08:00
# if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2022010000)
// accuracy!
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH & & target = = DNN_TARGET_CPU )
applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_CPU , CV_TEST_TAG_DNN_SKIP_IE_NGRAPH , CV_TEST_TAG_DNN_SKIP_IE_VERSION ) ;
# elif defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2021040000)
2022-03-30 18:47:59 +08:00
// Ngraph operation Reshape with name LSTM_16/lstm_y/reshape has dynamic output shape on 0 port, but CPU plug-in supports only static shape
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH & & target = = DNN_TARGET_OPENCL )
applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_OPENCL , CV_TEST_TAG_DNN_SKIP_IE_VERSION ) ;
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH & & target = = DNN_TARGET_OPENCL_FP16 )
applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16 , CV_TEST_TAG_DNN_SKIP_IE_VERSION ) ;
# endif
2022-03-15 14:14:05 +08:00
testONNXModels ( " lstm_cell_forward " , npy , 0 , 0 , false , false ) ;
2022-03-30 18:47:59 +08:00
}
TEST_P ( Test_ONNX_layers , LSTM_cell_bidirectional )
{
2022-12-03 07:55:16 +08:00
# if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2021040000)
2022-03-30 18:47:59 +08:00
// Ngraph operation Reshape with name LSTM_16/lstm_y/reshape has dynamic output shape on 0 port, but CPU plug-in supports only static shape
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH & & target = = DNN_TARGET_OPENCL )
applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_OPENCL , CV_TEST_TAG_DNN_SKIP_IE_VERSION ) ;
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH & & target = = DNN_TARGET_OPENCL_FP16 )
applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16 , CV_TEST_TAG_DNN_SKIP_IE_VERSION ) ;
# endif
2022-03-15 14:14:05 +08:00
testONNXModels ( " lstm_cell_bidirectional " , npy , 0 , 0 , false , false ) ;
2022-03-30 18:47:59 +08:00
}
TEST_P ( Test_ONNX_layers , LSTM_cell_with_peepholes )
{
2022-03-15 14:14:05 +08:00
testONNXModels ( " lstm_cell_with_peepholes " , npy , 0 , 0 , false , false ) ;
}
2023-04-20 15:11:33 +08:00
TEST_P ( Test_ONNX_layers , LSTM_cell_batchsize_50_seqlen_1 )
{
if ( backend = = DNN_BACKEND_CUDA )
applyTestTag ( CV_TEST_TAG_DNN_SKIP_CUDA ) ;
testONNXModels ( " lstm_cell_batchsize_50_seqlen_1 " , npy , 0 , 0 , false , false ) ;
}
TEST_P ( Test_ONNX_layers , LSTM_cell_batchsize_1_seqlen_50 )
{
if ( backend = = DNN_BACKEND_CUDA )
applyTestTag ( CV_TEST_TAG_DNN_SKIP_CUDA ) ;
testONNXModels ( " lstm_cell_batchsize_1_seqlen_50 " , npy , 0 , 0 , false , false ) ;
}
TEST_P ( Test_ONNX_layers , LSTM_cell_batchsize_5_seqlen_5 )
{
if ( backend = = DNN_BACKEND_CUDA )
applyTestTag ( CV_TEST_TAG_DNN_SKIP_CUDA ) ;
testONNXModels ( " lstm_cell_batchsize_5_seqlen_5 " , npy , 0 , 0 , false , false ) ;
}
2023-04-25 21:01:13 +08:00
TEST_P ( Test_ONNX_layers , LSTM_init_h0_c0 )
{
if ( backend = = DNN_BACKEND_CUDA )
applyTestTag ( CV_TEST_TAG_DNN_SKIP_CUDA ) ;
testONNXModels ( " lstm_init_h0_c0 " , npy , 0 , 0 , false , false , 3 ) ;
}
2023-05-18 03:46:56 +08:00
// epsilon is larger because ONNX output does not exactly match torch/opencv
TEST_P ( Test_ONNX_layers , LSTM_layout_seq )
{
if ( backend = = DNN_BACKEND_CUDA )
applyTestTag ( CV_TEST_TAG_DNN_SKIP_CUDA ) ;
testONNXModels ( " lstm_layout_0 " , npy , 0.005 , 0.005 , false , false , 3 ) ;
}
// epsilon is larger because ONNX output does not exactly match torch/opencv
TEST_P ( Test_ONNX_layers , LSTM_layout_batch )
{
if ( backend = = DNN_BACKEND_CUDA )
applyTestTag ( CV_TEST_TAG_DNN_SKIP_CUDA ) ;
testONNXModels ( " lstm_layout_1 " , npy , 0.005 , 0.005 , false , false , 3 ) ;
}
2023-04-25 21:01:13 +08:00
2023-09-22 16:25:02 +08:00
TEST_P ( Test_ONNX_layers , DISABLED_Einsum_1D )
{
testONNXModels ( " einsum_1d " , npy , 0 , 0 , false , false , 2 ) ;
}
TEST_P ( Test_ONNX_layers , Einsum_2D )
{
testONNXModels ( " einsum_2d " , npy , 0 , 0 , false , false , 2 ) ;
}
2023-10-24 21:47:00 +08:00
TEST_P ( Test_ONNX_layers , Einsum_2D_Ellipses )
{
2024-03-29 14:40:03 +08:00
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH )
applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_NGRAPH ) ;
2023-10-24 21:47:00 +08:00
testONNXModels ( " einsum_2d_ellipses " , npy , 0 , 0 , false , false , 2 ) ;
}
2023-09-22 16:25:02 +08:00
TEST_P ( Test_ONNX_layers , Einsum_3D )
{
testONNXModels ( " einsum_3d " , npy , 0 , 0 , false , false , 2 ) ;
}
TEST_P ( Test_ONNX_layers , Einsum_4D )
{
testONNXModels ( " einsum_4d " , npy , 0 , 0 , false , false , 2 ) ;
}
TEST_P ( Test_ONNX_layers , Einsum_5D )
{
testONNXModels ( " einsum_5d " , npy , 0 , 0 , false , false , 2 ) ;
}
TEST_P ( Test_ONNX_layers , DISABLED_Einsum_InnerProduct )
{
testONNXModels ( " einsum_inner " , npy , 0 , 0 , false , false , 2 ) ;
}
TEST_P ( Test_ONNX_layers , DISABLED_Einsum_HadamardProduct )
{
testONNXModels ( " einsum_hadamard " , npy , 0 , 0 , false , false , 2 ) ;
}
2023-10-24 21:47:00 +08:00
TEST_P ( Test_ONNX_layers , Einsum_Batch_Diagonal )
2023-09-22 16:25:02 +08:00
{
2024-03-29 14:40:03 +08:00
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH )
applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_NGRAPH ) ;
2023-09-22 16:25:02 +08:00
testONNXModels ( " einsum_batch_diagonal " , npy , 0 , 0 , false , false , 1 ) ;
}
TEST_P ( Test_ONNX_layers , Einsum_Sum )
{
testONNXModels ( " einsum_sum " , npy , 0 , 0 , false , false , 1 ) ;
}
TEST_P ( Test_ONNX_layers , Einsum_transpose )
{
testONNXModels ( " einsum_transpose " , npy , 0 , 0 , false , false , 1 ) ;
}
2023-12-25 19:42:05 +08:00
TEST_P ( Test_ONNX_layers , Einsum_const_inputs ) {
testONNXModels ( " einsum_const_inputs " , npy , 0 , 0 , false , false , 1 ) ;
}
2020-04-05 13:32:12 +08:00
TEST_P ( Test_ONNX_layers , Pad2d_Unfused )
{
testONNXModels ( " ReflectionPad2d " ) ;
testONNXModels ( " ZeroPad2d " ) ;
}
2020-08-26 18:15:59 +08:00
TEST_P ( Test_ONNX_layers , LinearWithConstant )
{
if ( backend = = DNN_BACKEND_OPENCV & & target = = DNN_TARGET_OPENCL_FP16 )
applyTestTag ( CV_TEST_TAG_DNN_SKIP_OPENCL_FP16 ) ;
# if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2020040000)
applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE ) ;
# endif
2020-11-21 20:05:20 +08:00
if ( backend = = DNN_BACKEND_CUDA )
applyTestTag ( CV_TEST_TAG_DNN_SKIP_CUDA ) ;
2020-08-26 18:15:59 +08:00
testONNXModels ( " lin_with_constant " ) ;
}
TEST_P ( Test_ONNX_layers , MatmulWithTwoInputs )
{
if ( backend = = DNN_BACKEND_OPENCV & & target = = DNN_TARGET_OPENCL_FP16 )
applyTestTag ( CV_TEST_TAG_DNN_SKIP_OPENCL_FP16 ) ;
# if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2020040000)
applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE ) ;
# endif
testONNXModels ( " matmul_with_two_inputs " ) ;
}
2020-09-17 19:05:22 +08:00
TEST_P ( Test_ONNX_layers , ResizeOpset11_Torch1_6 )
{
testONNXModels ( " resize_opset11_torch1.6 " ) ;
}
2021-01-15 19:01:48 +08:00
TEST_P ( Test_ONNX_layers , Mish )
{
testONNXModels ( " mish " ) ;
2022-07-28 11:21:29 +08:00
testONNXModels ( " mish_no_softplus " ) ;
2021-01-15 19:01:48 +08:00
}
2021-01-26 06:07:50 +08:00
TEST_P ( Test_ONNX_layers , CalculatePads )
{
testONNXModels ( " calc_pads " ) ;
}
2020-11-14 06:22:10 +08:00
TEST_P ( Test_ONNX_layers , Conv1d )
{
testONNXModels ( " conv1d " ) ;
}
TEST_P ( Test_ONNX_layers , Conv1d_bias )
{
testONNXModels ( " conv1d_bias " ) ;
}
TEST_P ( Test_ONNX_layers , Conv1d_variable_weight )
{
2021-01-22 06:16:56 +08:00
if ( backend = = DNN_BACKEND_CUDA )
applyTestTag ( CV_TEST_TAG_DNN_SKIP_CUDA ) ; // not supported
if ( backend = = DNN_BACKEND_VKCOM )
applyTestTag ( CV_TEST_TAG_DNN_SKIP_VULKAN ) ; // not supported
2020-11-14 06:22:10 +08:00
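// The convolution weights are passed as a second network input at runtime rather than stored as constants in the model.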
String basename = " conv1d_variable_w " ;
Net net = readNetFromONNX ( _tf ( " models/ " + basename + " .onnx " ) ) ;
ASSERT_FALSE ( net . empty ( ) ) ;
net . setPreferableBackend ( backend ) ;
net . setPreferableTarget ( target ) ;
Mat input = blobFromNPY ( _tf ( " data/input_ " + basename + " _0.npy " ) ) ;
Mat weights = blobFromNPY ( _tf ( " data/input_ " + basename + " _1.npy " ) ) ;
Mat ref = blobFromNPY ( _tf ( " data/output_ " + basename + " .npy " ) ) ;
net . setInput ( input , " 0 " ) ;
net . setInput ( weights , " 1 " ) ;
Mat out = net . forward ( ) ;
normAssert ( ref , out , " " , default_l1 , default_lInf ) ;
}
TEST_P ( Test_ONNX_layers , Conv1d_variable_weight_bias )
{
2021-01-22 06:16:56 +08:00
if ( backend = = DNN_BACKEND_CUDA )
applyTestTag ( CV_TEST_TAG_DNN_SKIP_CUDA ) ; // not supported
if ( backend = = DNN_BACKEND_VKCOM )
applyTestTag ( CV_TEST_TAG_DNN_SKIP_VULKAN ) ; // not supported
2020-11-17 19:33:39 +08:00
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH )
{
if ( target = = DNN_TARGET_MYRIAD ) applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_MYRIAD , CV_TEST_TAG_DNN_SKIP_IE_NGRAPH ) ;
2021-03-20 19:20:02 +08:00
if ( target = = DNN_TARGET_CPU & & getInferenceEngineCPUType ( ) = = CV_DNN_INFERENCE_ENGINE_CPU_TYPE_ARM_COMPUTE )
applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_ARM_CPU , CV_TEST_TAG_DNN_SKIP_IE_NGRAPH ) ;
2020-11-17 19:33:39 +08:00
}
2020-11-14 06:22:10 +08:00
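// Both the weights and the bias are supplied as runtime network inputs.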
String basename = " conv1d_variable_wb " ;
Net net = readNetFromONNX ( _tf ( " models/ " + basename + " .onnx " ) ) ;
ASSERT_FALSE ( net . empty ( ) ) ;
net . setPreferableBackend ( backend ) ;
net . setPreferableTarget ( target ) ;
Mat input = blobFromNPY ( _tf ( " data/input_ " + basename + " _0.npy " ) ) ;
Mat weights = blobFromNPY ( _tf ( " data/input_ " + basename + " _1.npy " ) ) ;
Mat bias = blobFromNPY ( _tf ( " data/input_ " + basename + " _2.npy " ) ) ;
Mat ref = blobFromNPY ( _tf ( " data/output_ " + basename + " .npy " ) ) ;
net . setInput ( input , " 0 " ) ;
net . setInput ( weights , " 1 " ) ;
net . setInput ( bias , " bias " ) ;
Mat out = net . forward ( ) ;
normAssert ( ref , out , " " , default_l1 , default_lInf ) ;
}
2020-11-17 17:52:08 +08:00
TEST_P ( Test_ONNX_layers , GatherMultiOutput )
{
2021-11-26 03:56:27 +08:00
# if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2021040000)
// IE Exception: Ngraph operation Reshape with name 6 has dynamic output shape on 0 port, but CPU plug-in supports only static shape
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH & & ( target = = DNN_TARGET_OPENCL | | target = = DNN_TARGET_OPENCL_FP16 ) )
applyTestTag ( target = = DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_IE_OPENCL : CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16 ,
CV_TEST_TAG_DNN_SKIP_IE_NGRAPH , CV_TEST_TAG_DNN_SKIP_IE_VERSION
) ;
# endif
2021-03-24 17:28:05 +08:00
# if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2021030000)
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH & & target = = DNN_TARGET_OPENCL )
applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_OPENCL , CV_TEST_TAG_DNN_SKIP_IE_NGRAPH ) ; // exception
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH & & target = = DNN_TARGET_OPENCL_FP16 )
applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16 , CV_TEST_TAG_DNN_SKIP_IE_NGRAPH ) ; // exception
# endif
2021-11-26 03:56:27 +08:00
# if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LE(2021030000)
2020-11-18 03:52:07 +08:00
if ( target = = DNN_TARGET_MYRIAD )
applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_MYRIAD , CV_TEST_TAG_DNN_SKIP_IE ) ;
# endif
2022-03-16 23:41:39 +08:00
testONNXModels ( " gather_multi_output " , npy , 0 , 0 , false , false ) ;
2020-11-17 17:52:08 +08:00
}
2021-12-23 08:39:09 +08:00
TEST_P ( Test_ONNX_layers , DynamicAxes_squeeze_and_conv )
{
# if defined(INF_ENGINE_RELEASE)
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 )
{
if ( target = = DNN_TARGET_MYRIAD ) applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_MYRIAD , CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER ) ;
}
# if INF_ENGINE_VER_MAJOR_LT(2021000000)
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH )
{
if ( target = = DNN_TARGET_MYRIAD ) applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_MYRIAD , CV_TEST_TAG_DNN_SKIP_IE_NGRAPH ) ;
}
# endif
# endif
testONNXModels ( " squeeze_and_conv_dynamic_axes " ) ;
}
TEST_P ( Test_ONNX_layers , DynamicAxes_unsqueeze_and_conv )
{
# if defined(INF_ENGINE_RELEASE)
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 )
{
if ( target = = DNN_TARGET_MYRIAD ) applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_MYRIAD , CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER ) ;
}
# if INF_ENGINE_VER_MAJOR_LT(2021000000)
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH )
{
if ( target = = DNN_TARGET_MYRIAD ) applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_MYRIAD , CV_TEST_TAG_DNN_SKIP_IE_NGRAPH ) ;
}
# endif
# endif
testONNXModels ( " unsqueeze_and_conv_dynamic_axes " ) ;
}
TEST_P ( Test_ONNX_layers , DynamicAxes_gather )
{
# if defined(INF_ENGINE_RELEASE)
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 )
{
if ( target = = DNN_TARGET_MYRIAD ) applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_MYRIAD , CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER ) ;
}
# if INF_ENGINE_VER_MAJOR_LT(2021000000)
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH )
{
if ( target = = DNN_TARGET_MYRIAD ) applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_MYRIAD , CV_TEST_TAG_DNN_SKIP_IE_NGRAPH ) ;
}
# endif
# endif
2022-03-16 23:41:39 +08:00
testONNXModels ( " gather_dynamic_axes " , npy , 0 , 0 , false , false ) ;
2021-12-23 08:39:09 +08:00
}
TEST_P ( Test_ONNX_layers , DynamicAxes_gather_scalar )
2020-11-17 18:31:04 +08:00
{
2022-03-31 03:03:38 +08:00
# if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2022010000)
2021-11-26 03:56:27 +08:00
// accuracy
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH & & ( target = = DNN_TARGET_OPENCL | | target = = DNN_TARGET_OPENCL_FP16 ) )
applyTestTag ( target = = DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_IE_OPENCL : CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16 ,
CV_TEST_TAG_DNN_SKIP_IE_NGRAPH , CV_TEST_TAG_DNN_SKIP_IE_VERSION
) ;
2022-03-31 03:03:38 +08:00
# elif defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2021040000)
// accuracy
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH & & ( target = = DNN_TARGET_OPENCL | | target = = DNN_TARGET_OPENCL_FP16 ) )
applyTestTag ( target = = DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_IE_OPENCL : CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16 ,
CV_TEST_TAG_DNN_SKIP_IE_NGRAPH , CV_TEST_TAG_DNN_SKIP_IE_VERSION
) ;
# elif defined(INF_ENGINE_RELEASE)
2020-11-17 18:31:04 +08:00
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 )
{
if ( target = = DNN_TARGET_MYRIAD ) applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_MYRIAD , CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER ) ;
}
2021-11-26 03:56:27 +08:00
# if INF_ENGINE_VER_MAJOR_LT(2021000000)
2020-11-17 18:31:04 +08:00
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH )
{
if ( target = = DNN_TARGET_MYRIAD ) applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_MYRIAD , CV_TEST_TAG_DNN_SKIP_IE_NGRAPH ) ;
}
2021-11-26 03:56:27 +08:00
# endif
# endif
2022-03-16 23:41:39 +08:00
testONNXModels ( " gather_scalar_dynamic_axes " , npy , 0 , 0 , false , false ) ;
2021-12-23 08:39:09 +08:00
}
TEST_P ( Test_ONNX_layers , DynamicAxes_slice )
{
# if defined(INF_ENGINE_RELEASE)
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 )
{
if ( target = = DNN_TARGET_MYRIAD ) applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_MYRIAD , CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER ) ;
}
# if INF_ENGINE_VER_MAJOR_LT(2021000000)
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH )
{
if ( target = = DNN_TARGET_MYRIAD ) applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_MYRIAD , CV_TEST_TAG_DNN_SKIP_IE_NGRAPH ) ;
}
# endif
# endif
2020-11-17 18:31:04 +08:00
testONNXModels ( " slice_dynamic_axes " ) ;
2021-12-23 08:39:09 +08:00
}
TEST_P ( Test_ONNX_layers , DynamicAxes_slice_opset_11 )
{
# if defined(INF_ENGINE_RELEASE)
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 )
{
if ( target = = DNN_TARGET_MYRIAD ) applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_MYRIAD , CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER ) ;
}
# if INF_ENGINE_VER_MAJOR_LT(2021000000)
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH )
{
if ( target = = DNN_TARGET_MYRIAD ) applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_MYRIAD , CV_TEST_TAG_DNN_SKIP_IE_NGRAPH ) ;
}
# endif
# endif
2020-11-17 18:31:04 +08:00
testONNXModels ( " slice_opset_11_dynamic_axes " ) ;
2021-12-23 08:39:09 +08:00
}
TEST_P ( Test_ONNX_layers , DynamicAxes_resize_opset11_torch16 )
{
# if defined(INF_ENGINE_RELEASE)
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 )
{
if ( target = = DNN_TARGET_MYRIAD ) applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_MYRIAD , CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER ) ;
}
# if INF_ENGINE_VER_MAJOR_LT(2021000000)
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH )
{
if ( target = = DNN_TARGET_MYRIAD ) applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_MYRIAD , CV_TEST_TAG_DNN_SKIP_IE_NGRAPH ) ;
}
# endif
# endif
2020-11-17 18:31:04 +08:00
testONNXModels ( " resize_opset11_torch1.6_dynamic_axes " ) ;
2021-12-23 08:39:09 +08:00
}
TEST_P ( Test_ONNX_layers , DynamicAxes_average_pooling )
{
# if defined(INF_ENGINE_RELEASE)
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 )
{
if ( target = = DNN_TARGET_MYRIAD ) applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_MYRIAD , CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER ) ;
}
# if INF_ENGINE_VER_MAJOR_LT(2021000000)
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH )
{
if ( target = = DNN_TARGET_MYRIAD ) applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_MYRIAD , CV_TEST_TAG_DNN_SKIP_IE_NGRAPH ) ;
}
# endif
# endif
2020-11-17 18:31:04 +08:00
testONNXModels ( " average_pooling_dynamic_axes " ) ;
2021-12-23 08:39:09 +08:00
}
TEST_P ( Test_ONNX_layers , DynamicAxes_maxpooling_sigmoid )
{
# if defined(INF_ENGINE_RELEASE)
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 )
{
if ( target = = DNN_TARGET_MYRIAD ) applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_MYRIAD , CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER ) ;
}
# if INF_ENGINE_VER_MAJOR_LT(2021000000)
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH )
{
if ( target = = DNN_TARGET_MYRIAD ) applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_MYRIAD , CV_TEST_TAG_DNN_SKIP_IE_NGRAPH ) ;
}
# endif
# endif
2020-11-17 18:31:04 +08:00
testONNXModels ( " maxpooling_sigmoid_dynamic_axes " ) ;
2021-12-23 08:39:09 +08:00
}
TEST_P ( Test_ONNX_layers , DynamicAxes_dynamic_batch )
{
# if defined(INF_ENGINE_RELEASE)
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 )
{
if ( target = = DNN_TARGET_MYRIAD ) applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_MYRIAD , CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER ) ;
}
# if INF_ENGINE_VER_MAJOR_LT(2021000000)
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH )
{
if ( target = = DNN_TARGET_MYRIAD ) applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_MYRIAD , CV_TEST_TAG_DNN_SKIP_IE_NGRAPH ) ;
}
# endif
# endif
2021-08-18 23:37:35 +08:00
testONNXModels ( " dynamic_batch " ) ;
2020-11-17 18:31:04 +08:00
}
2021-12-23 08:39:09 +08:00
2020-11-25 00:52:45 +08:00
TEST_P ( Test_ONNX_layers , MaxPool1d )
{
2021-11-30 20:08:35 +08:00
# if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2021040000)
2020-11-25 00:52:45 +08:00
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 )
{
if ( target = = DNN_TARGET_MYRIAD ) applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_MYRIAD , CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER ) ;
}
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH )
{
if ( target = = DNN_TARGET_MYRIAD ) applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_MYRIAD , CV_TEST_TAG_DNN_SKIP_IE_NGRAPH ) ;
}
2021-11-30 20:08:35 +08:00
# endif
# if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2021040000)
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH & & target = = DNN_TARGET_MYRIAD )
{
// 2021.4: [ GENERAL_ERROR ] AssertionFailed: !expired()
applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_MYRIAD , CV_TEST_TAG_DNN_SKIP_IE_NGRAPH ) ;
}
# endif
2020-11-25 00:52:45 +08:00
testONNXModels ( " maxpooling_1d " ) ;
}
TEST_P ( Test_ONNX_layers , MaxPoolSigmoid1d )
{
2022-03-31 03:03:38 +08:00
# if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2022010000)
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH & & target = = DNN_TARGET_CPU )
applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_CPU , CV_TEST_TAG_DNN_SKIP_IE_NGRAPH , CV_TEST_TAG_DNN_SKIP_IE_VERSION ) ;
# elif defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2021040000)
2020-11-25 00:52:45 +08:00
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 )
{
if ( target = = DNN_TARGET_MYRIAD ) applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_MYRIAD , CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER ) ;
}
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH )
{
if ( target = = DNN_TARGET_MYRIAD ) applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_MYRIAD , CV_TEST_TAG_DNN_SKIP_IE_NGRAPH ) ;
}
2021-11-30 20:08:35 +08:00
# endif
2020-11-25 00:52:45 +08:00
testONNXModels ( " maxpooling_sigmoid_1d " ) ;
}
TEST_P ( Test_ONNX_layers , MaxPool1d_Twise )
{
2022-12-03 07:55:16 +08:00
# if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2021040000)
2020-11-25 00:52:45 +08:00
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 )
{
if ( target = = DNN_TARGET_MYRIAD ) applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_MYRIAD , CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER ) ;
}
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH )
{
if ( target = = DNN_TARGET_MYRIAD ) applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_MYRIAD , CV_TEST_TAG_DNN_SKIP_IE_NGRAPH ) ;
}
2021-11-30 20:08:35 +08:00
# endif
2020-11-25 00:52:45 +08:00
testONNXModels ( " two_maxpooling_1d " ) ;
}
TEST_P ( Test_ONNX_layers , AvePool1d )
{
2022-12-03 07:55:16 +08:00
# if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2021040000)
2020-11-25 00:52:45 +08:00
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 )
{
if ( target = = DNN_TARGET_MYRIAD ) applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_MYRIAD , CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER ) ;
}
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH )
{
if ( target = = DNN_TARGET_MYRIAD ) applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_MYRIAD , CV_TEST_TAG_DNN_SKIP_IE_NGRAPH ) ;
}
2021-11-30 20:08:35 +08:00
# endif
2020-11-25 00:52:45 +08:00
testONNXModels ( " average_pooling_1d " ) ;
}
TEST_P ( Test_ONNX_layers , PoolConv1d )
{
2022-12-03 07:55:16 +08:00
# if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2021040000)
2020-11-25 00:52:45 +08:00
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 )
{
if ( target = = DNN_TARGET_MYRIAD ) applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_MYRIAD , CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER ) ;
}
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH )
{
if ( target = = DNN_TARGET_MYRIAD ) applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_MYRIAD , CV_TEST_TAG_DNN_SKIP_IE_NGRAPH ) ;
}
2021-11-30 20:08:35 +08:00
# endif
2020-11-25 00:52:45 +08:00
testONNXModels ( " pool_conv_1d " ) ;
}
TEST_P ( Test_ONNX_layers , ConvResizePool1d )
{
2021-11-26 03:56:27 +08:00
# if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2021040000)
// IE Exception: Ngraph operation Reshape with name 15 has dynamic output shape on 0 port, but CPU plug-in supports only static shape
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH & & ( target = = DNN_TARGET_OPENCL | | target = = DNN_TARGET_OPENCL_FP16 ) )
applyTestTag ( target = = DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_IE_OPENCL : CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16 ,
CV_TEST_TAG_DNN_SKIP_IE_NGRAPH , CV_TEST_TAG_DNN_SKIP_IE_VERSION
) ;
# endif
2021-03-24 17:28:05 +08:00
# if defined(INF_ENGINE_RELEASE)
2020-11-25 00:52:45 +08:00
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 )
{
if ( target = = DNN_TARGET_MYRIAD ) applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_MYRIAD , CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER ) ;
}
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH )
{
if ( target = = DNN_TARGET_MYRIAD ) applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_MYRIAD , CV_TEST_TAG_DNN_SKIP_IE_NGRAPH ) ;
2021-03-24 17:28:05 +08:00
# if INF_ENGINE_VER_MAJOR_EQ(2021030000)
if ( target = = DNN_TARGET_OPENCL ) applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_OPENCL , CV_TEST_TAG_DNN_SKIP_IE_NGRAPH ) ; // exception
if ( target = = DNN_TARGET_OPENCL_FP16 ) applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16 , CV_TEST_TAG_DNN_SKIP_IE_NGRAPH ) ; // exception
# endif
2020-11-25 00:52:45 +08:00
}
2021-03-24 17:28:05 +08:00
# endif
2023-11-20 18:45:37 +08:00
const double lInf = ( target = = DNN_TARGET_CPU_FP16 ) ? 0.024 : default_lInf ;
testONNXModels ( " conv_resize_pool_1d " , npy , default_l1 , lInf ) ;
2020-11-25 00:52:45 +08:00
}
2023-01-11 08:42:51 +08:00
TEST_P ( Test_ONNX_layers , DepthWiseAdd )
{
testONNXModels ( " depthwiseconv_add " ) ;
}
2023-01-28 11:41:00 +08:00
TEST_P ( Test_ONNX_layers , DepthStride2 )
{
testONNXModels ( " depthwise_stride2 " ) ;
}
2021-10-11 23:58:10 +08:00
TEST_P ( Test_ONNX_layers , SubFromConst )
{
testONNXModels ( " sub_from_const1 " ) ;
testONNXModels ( " sub_from_const_eltwise " ) ;
testONNXModels ( " sub_from_const_broadcast " ) ;
}
2021-12-21 00:53:37 +08:00
TEST_P ( Test_ONNX_layers , DivConst )
{
testONNXModels ( " div_const " ) ;
}
2022-06-22 15:00:17 +08:00
TEST_P ( Test_ONNX_layers , Gemm )
{
testONNXModels ( " gemm_no_transB " ) ;
testONNXModels ( " gemm_transB_0 " ) ;
2022-11-29 17:13:36 +08:00
testONNXModels ( " gemm_first_const " ) ;
2022-06-22 15:00:17 +08:00
}
2021-10-15 21:35:03 +08:00
2022-12-29 17:14:02 +08:00
TEST_P ( Test_ONNX_layers , Gemm_bias )
{
testONNXModels ( " gemm_vector_bias " ) ;
}
2021-10-05 02:07:38 +08:00
TEST_P ( Test_ONNX_layers , Quantized_Convolution )
{
2022-11-01 00:06:31 +08:00
// The difference between the QOperator and QDQ formats:
// https://onnxruntime.ai/docs/performance/quantization.html#onnx-quantization-representation-format.
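// QOperator models use dedicated quantized operators (e.g. QLinearConv), while QDQ models keep float operators wrapped in QuantizeLinear/DequantizeLinear pairs.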
{
SCOPED_TRACE ( " QOperator quantized model. " ) ;
testONNXModels ( " quantized_conv_uint8_weights " , npy , 0.004 , 0.02 ) ;
testONNXModels ( " quantized_conv_int8_weights " , npy , 0.03 , 0.5 ) ;
testONNXModels ( " quantized_conv_per_channel_weights " , npy , 0.06 , 0.4 ) ;
testONNXModels ( " quantized_conv_asymmetric_pads_int8_weights " ) ;
}
2022-05-16 19:01:37 +08:00
2022-11-01 00:06:31 +08:00
{
SCOPED_TRACE ( " QDQ quantized model. " ) ;
testONNXModels ( " quantized_conv_uint8_weights_qdq " , npy , 0.004 , 0.02 ) ;
testONNXModels ( " quantized_conv_int8_weights_qdq " , npy , 0.03 , 0.5 ) ;
testONNXModels ( " quantized_conv_per_channel_weights_qdq " , npy , 0.06 , 0.4 ) ;
}
2021-10-05 02:07:38 +08:00
}
TEST_P ( Test_ONNX_layers , Quantized_MatMul )
{
testONNXModels ( " quantized_matmul_uint8_weights " , npy , 0.005 , 0.007 ) ;
testONNXModels ( " quantized_matmul_int8_weights " , npy , 0.06 , 0.2 ) ;
testONNXModels ( " quantized_matmul_per_channel_weights " , npy , 0.06 , 0.22 ) ;
}
2022-08-30 09:50:29 +08:00
TEST_P ( Test_ONNX_layers , Quantized_Gemm )
{
testONNXModels ( " quantized_gemm " , npy ) ;
}
2021-10-05 02:07:38 +08:00
TEST_P ( Test_ONNX_layers , Quantized_MatMul_Variable_Weights )
{
// Unsupported: quantized MatMul with variable (non-constant) inputs must raise an exception
EXPECT_THROW (
{
testONNXModels ( " quantized_matmul_variable_inputs " ) ;
} , cv : : Exception ) ;
}
TEST_P ( Test_ONNX_layers , Quantized_Eltwise )
{
testONNXModels ( " quantized_eltwise " ) ;
}
TEST_P ( Test_ONNX_layers , Quantized_Eltwise_Scalar )
{
testONNXModels ( " quantized_eltwise_scalar " ) ;
}
TEST_P ( Test_ONNX_layers , Quantized_Eltwise_Broadcast )
{
testONNXModels ( " quantized_eltwise_broadcast " ) ;
}
TEST_P ( Test_ONNX_layers , Quantized_LeakyReLU )
{
testONNXModels ( " quantized_leaky_relu " ) ;
}
TEST_P ( Test_ONNX_layers , Quantized_Sigmoid )
{
testONNXModels ( " quantized_sigmoid " ) ;
}
TEST_P ( Test_ONNX_layers , Quantized_MaxPool )
{
testONNXModels ( " quantized_maxpool " ) ;
}
TEST_P ( Test_ONNX_layers , Quantized_AvgPool )
{
testONNXModels ( " quantized_avgpool " ) ;
}
TEST_P ( Test_ONNX_layers , Quantized_Split )
{
testONNXModels ( " quantized_split " ) ;
}
TEST_P ( Test_ONNX_layers , Quantized_Pad )
{
testONNXModels ( " quantized_padding " ) ;
}
TEST_P ( Test_ONNX_layers , Quantized_Reshape )
{
testONNXModels ( " quantized_reshape " ) ;
}
TEST_P ( Test_ONNX_layers , Quantized_Transpose )
{
testONNXModels ( " quantized_transpose " ) ;
}
TEST_P ( Test_ONNX_layers , Quantized_Squeeze )
{
testONNXModels ( " quantized_squeeze " ) ;
}
TEST_P ( Test_ONNX_layers , Quantized_Unsqueeze )
{
testONNXModels ( " quantized_unsqueeze " ) ;
}
TEST_P ( Test_ONNX_layers , Quantized_Resize )
{
testONNXModels ( " quantized_resize_nearest " ) ;
2023-09-28 21:24:43 +08:00
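// The nGraph backend needs a looser L1 tolerance for bilinear resize.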
double l1 = backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH ? 0.0013 : 2e-4 ;
testONNXModels ( " quantized_resize_bilinear " , npy , l1 , 0.003 ) ;
l1 = backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH ? 0.0013 : 3e-4 ;
testONNXModels ( " quantized_resize_bilinear_align " , npy , l1 , 0.003 ) ;
2021-10-05 02:07:38 +08:00
}
TEST_P ( Test_ONNX_layers , Quantized_Concat )
{
2023-09-28 21:24:43 +08:00
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH )
applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_NGRAPH ) ;
2021-10-05 02:07:38 +08:00
testONNXModels ( " quantized_concat " ) ;
testONNXModels ( " quantized_concat_const_blob " ) ;
}
TEST_P ( Test_ONNX_layers , Quantized_Constant )
{
testONNXModels ( " quantized_constant " , npy , 0.002 , 0.008 ) ;
}
2022-05-23 22:50:42 +08:00
TEST_P ( Test_ONNX_layers , OutputRegistration )
{
testONNXModels ( " output_registration " , npy , 0 , 0 , false , true , 2 ) ;
}
2023-05-25 18:35:58 +08:00
TEST_P ( Test_ONNX_layers , QLinearSoftmax )
{
2023-09-28 21:24:43 +08:00
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH )
applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_NGRAPH ) ;
2023-05-25 18:35:58 +08:00
testONNXModels ( " qlinearsoftmax_v11 " , npy , 0.002 , 0.002 ) ; // 2D coerced
testONNXModels ( " qlinearsoftmax_v13 " , npy , 0.002 , 0.002 ) ;
}
2018-09-11 02:07:51 +08:00
INSTANTIATE_TEST_CASE_P ( /*nothing*/ , Test_ONNX_layers , dnnBackendsAndTargets ( ) ) ;
2019-06-20 21:43:28 +08:00
class Test_ONNX_nets : public Test_ONNX_layers
{
public :
Test_ONNX_nets ( ) { required = false ; }
} ;
2018-09-11 02:07:51 +08:00
TEST_P ( Test_ONNX_nets , Alexnet )
{
2020-04-30 04:01:10 +08:00
# if defined(OPENCV_32BIT_CONFIGURATION) && (defined(HAVE_OPENCL) || defined(_WIN32))
2020-02-21 04:23:19 +08:00
applyTestTag ( CV_TEST_TAG_MEMORY_2GB ) ;
# else
2018-10-09 06:38:06 +08:00
applyTestTag ( target = = DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_512MB : CV_TEST_TAG_MEMORY_1GB ) ;
2020-02-21 04:23:19 +08:00
# endif
2019-06-20 21:43:28 +08:00
const String model = _tf ( " models/alexnet.onnx " , false ) ;
2018-09-11 02:07:51 +08:00
Net net = readNetFromONNX ( model ) ;
ASSERT_FALSE ( net . empty ( ) ) ;
net . setPreferableBackend ( backend ) ;
net . setPreferableTarget ( target ) ;
2023-11-20 18:45:37 +08:00
net . enableWinograd ( false ) ;
2018-09-11 02:07:51 +08:00
Mat inp = imread ( _tf ( " ../grace_hopper_227.png " ) ) ;
Mat ref = blobFromNPY ( _tf ( " ../caffe_alexnet_prob.npy " ) ) ;
checkBackend ( & inp , & ref ) ;
net . setInput ( blobFromImage ( inp , 1.0f , Size ( 227 , 227 ) , Scalar ( ) , false ) ) ;
ASSERT_FALSE ( net . empty ( ) ) ;
Mat out = net . forward ( ) ;
normAssert ( out , ref , " " , default_l1 , default_lInf ) ;
2019-04-19 19:54:08 +08:00
expectNoFallbacksFromIE ( net ) ;
2018-09-11 02:07:51 +08:00
}
2023-10-18 15:41:47 +08:00
TEST_P ( Test_ONNX_nets , RAFT )
{
applyTestTag ( CV_TEST_TAG_LONG , CV_TEST_TAG_DEBUG_VERYLONG , CV_TEST_TAG_MEMORY_2GB ) ;
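// Run the RAFT optical-flow model on two consecutive frames and compare the predicted flow (output 12007) against a stored reference.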
std : : string weight_path = _tf ( " models/optical_flow_estimation_raft_2023aug.onnx " , false ) ;
std : : string img0_path = findDataFile ( std : : string ( " gpu/opticalflow/frame0.png " ) ) ;
std : : string img1_path = findDataFile ( std : : string ( " gpu/opticalflow/frame1.png " ) ) ;
Size target_size { 480 , 360 } ;
auto img0 = imread ( img0_path ) ;
auto img1 = imread ( img1_path ) ;
auto blob0 = blobFromImage ( img0 , 1.0 , target_size , 0 , true ) ;
auto blob1 = blobFromImage ( img1 , 1.0 , target_size , 0 , true ) ;
auto net = readNet ( weight_path ) ;
net . setInput ( blob0 , " 0 " ) ;
net . setInput ( blob1 , " 1 " ) ;
std : : vector < std : : string > outnames { " 12007 " , " 12006 " } ;
std : : vector < Mat > outs ;
net . forward ( outs , outnames ) ;
// output 12006 is not checked to save space in opencv_extra since its ref is > 1MB,
// and output 12006 is calculated from 12007 so checking 12007 is sufficient.
std : : string ref_12700_path = _tf ( " data/output_optical_flow_estimation_raft_2023aug.npy " ) ;
auto ref0 = blobFromNPY ( ref_12700_path ) ;
normAssert ( ref0 , outs [ 0 ] , " " , 1e-5 , 1.8e-4 ) ;
}
2018-09-11 02:07:51 +08:00
TEST_P ( Test_ONNX_nets , Squeezenet )
{
testONNXModels ( " squeezenet " , pb ) ;
}
TEST_P ( Test_ONNX_nets , Googlenet )
{
2022-03-31 03:03:38 +08:00
# if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2022010000)
// accuracy
if ( target = = DNN_TARGET_MYRIAD )
applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_MYRIAD , CV_TEST_TAG_DNN_SKIP_IE_NGRAPH , CV_TEST_TAG_DNN_SKIP_IE_VERSION ) ;
# elif defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2021040000)
// accuracy
if ( target = = DNN_TARGET_MYRIAD )
applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_MYRIAD , CV_TEST_TAG_DNN_SKIP_IE_NGRAPH , CV_TEST_TAG_DNN_SKIP_IE_VERSION ) ;
# elif defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2021040000)
2019-12-02 21:16:06 +08:00
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 )
applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER ) ;
2018-09-11 02:07:51 +08:00
2019-12-24 18:34:33 +08:00
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH )
applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_NGRAPH ) ;
2021-11-30 20:08:35 +08:00
# endif
2019-12-24 18:34:33 +08:00
2019-06-20 21:43:28 +08:00
const String model = _tf ( " models/googlenet.onnx " , false ) ;
2018-09-11 02:07:51 +08:00
Net net = readNetFromONNX ( model ) ;
ASSERT_FALSE ( net . empty ( ) ) ;
net . setPreferableBackend ( backend ) ;
net . setPreferableTarget ( target ) ;
2023-11-20 18:45:37 +08:00
if ( target = = DNN_TARGET_CPU_FP16 )
net . enableWinograd ( false ) ;
2018-09-11 02:07:51 +08:00
std : : vector < Mat > images ;
images . push_back ( imread ( _tf ( " ../googlenet_0.png " ) ) ) ;
images . push_back ( imread ( _tf ( " ../googlenet_1.png " ) ) ) ;
Mat inp = blobFromImages ( images , 1.0f , Size ( ) , Scalar ( ) , false ) ;
Mat ref = blobFromNPY ( _tf ( " ../googlenet_prob.npy " ) ) ;
checkBackend ( & inp , & ref ) ;
net . setInput ( inp ) ;
ASSERT_FALSE ( net . empty ( ) ) ;
Mat out = net . forward ( ) ;
normAssert ( ref , out , " " , default_l1 , default_lInf ) ;
2019-04-19 19:54:08 +08:00
expectNoFallbacksFromIE ( net ) ;
2018-09-11 02:07:51 +08:00
}
TEST_P ( Test_ONNX_nets , CaffeNet )
{
2020-04-30 04:01:10 +08:00
# if defined(OPENCV_32BIT_CONFIGURATION) && (defined(HAVE_OPENCL) || defined(_WIN32))
2020-02-21 04:23:19 +08:00
applyTestTag ( CV_TEST_TAG_MEMORY_2GB ) ;
# else
2018-10-09 06:38:06 +08:00
applyTestTag ( target = = DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_512MB : CV_TEST_TAG_MEMORY_1GB ) ;
2020-02-21 04:23:19 +08:00
# endif
2019-10-04 15:29:27 +08:00
# if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2019030000)
2019-12-02 21:16:06 +08:00
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 & & target = = DNN_TARGET_MYRIAD
2019-10-04 15:29:27 +08:00
& & getInferenceEngineVPUType ( ) = = CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X )
2019-12-02 21:16:06 +08:00
applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X , CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER , CV_TEST_TAG_DNN_SKIP_IE_VERSION ) ;
2019-10-04 15:29:27 +08:00
# endif
2018-09-11 02:07:51 +08:00
testONNXModels ( " caffenet " , pb ) ;
}
TEST_P ( Test_ONNX_nets , RCNN_ILSVRC13 )
{
2020-04-30 04:01:10 +08:00
# if defined(OPENCV_32BIT_CONFIGURATION) && (defined(HAVE_OPENCL) || defined(_WIN32))
2020-02-21 04:23:19 +08:00
applyTestTag ( CV_TEST_TAG_MEMORY_2GB ) ;
# else
2018-10-09 06:38:06 +08:00
applyTestTag ( target = = DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_512MB : CV_TEST_TAG_MEMORY_1GB ) ;
2020-02-21 04:23:19 +08:00
# endif
2019-10-04 15:29:27 +08:00
# if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2019030000)
2019-12-02 21:16:06 +08:00
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 & & target = = DNN_TARGET_MYRIAD
2019-10-04 15:29:27 +08:00
& & getInferenceEngineVPUType ( ) = = CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X )
2019-12-02 21:16:06 +08:00
applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X , CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER , CV_TEST_TAG_DNN_SKIP_IE_VERSION ) ;
2019-10-04 15:29:27 +08:00
# endif
2019-03-29 21:42:58 +08:00
// Reference output values are in range [-4.992, -1.161]
2019-12-24 18:34:33 +08:00
testONNXModels ( " rcnn_ilsvrc13 " , pb , 0.0046 ) ;
2018-09-11 02:07:51 +08:00
}
TEST_P ( Test_ONNX_nets , VGG16_bn )
{
2018-10-09 06:38:06 +08:00
applyTestTag ( CV_TEST_TAG_MEMORY_6GB ) ; // > 2.3Gb
2019-03-29 21:42:58 +08:00
// output range: [-16; 27], after Softmax [0; 0.67]
const double lInf = ( target = = DNN_TARGET_MYRIAD ) ? 0.038 : default_lInf ;
testONNXModels ( " vgg16-bn " , pb , default_l1 , lInf , true ) ;
2018-09-11 02:07:51 +08:00
}
TEST_P ( Test_ONNX_nets , ZFNet )
{
2019-05-27 20:14:18 +08:00
applyTestTag ( CV_TEST_TAG_MEMORY_2GB ) ;
2018-09-11 02:07:51 +08:00
testONNXModels ( " zfnet512 " , pb ) ;
}
TEST_P ( Test_ONNX_nets , ResNet18v1 )
{
2018-10-09 06:38:06 +08:00
applyTestTag ( CV_TEST_TAG_MEMORY_512MB ) ;
2019-03-29 21:42:58 +08:00
// output range: [-16; 22], after Softmax [0, 0.51]
2019-05-30 22:36:00 +08:00
testONNXModels ( " resnet18v1 " , pb , default_l1 , default_lInf , true , target ! = DNN_TARGET_MYRIAD ) ;
2018-09-11 02:07:51 +08:00
}
TEST_P ( Test_ONNX_nets , ResNet50v1 )
{
2018-10-09 06:38:06 +08:00
applyTestTag ( CV_TEST_TAG_MEMORY_512MB ) ;
2019-03-29 21:42:58 +08:00
// output range: [-67; 75], after Softmax [0, 0.98]
2024-03-25 14:03:28 +08:00
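// Track the process memory high-water mark around inference to catch memory-usage regressions on the CPU backend.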
size_t hwm0 = getTopMemoryUsageMB ( ) ;
2019-05-30 22:36:00 +08:00
testONNXModels ( " resnet50v1 " , pb , default_l1 , default_lInf , true , target ! = DNN_TARGET_MYRIAD ) ;
2024-03-25 14:03:28 +08:00
size_t hwm1 = getTopMemoryUsageMB ( ) ;
if ( backend = = DNN_BACKEND_OPENCV & & target = = DNN_TARGET_CPU )
{
EXPECT_LE ( hwm1 - hwm0 , 350 ) < < " Top allocated memory " ;
}
2018-09-11 02:07:51 +08:00
}
2021-12-25 08:45:41 +08:00
TEST_P ( Test_ONNX_nets , ResNet50_Int8 )
2021-10-05 02:07:38 +08:00
{
testONNXModels ( " resnet50_int8 " , pb , default_l1 , default_lInf , true ) ;
}
2018-09-11 02:07:51 +08:00
TEST_P ( Test_ONNX_nets , ResNet101_DUC_HDC )
{
2018-10-09 06:38:06 +08:00
applyTestTag ( CV_TEST_TAG_VERYLONG ) ;
2019-04-01 20:00:25 +08:00
# if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
2019-12-02 21:16:06 +08:00
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 )
applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER , CV_TEST_TAG_DNN_SKIP_IE_VERSION ) ;
2019-03-29 21:42:58 +08:00
# endif
# if defined(INF_ENGINE_RELEASE)
2019-12-02 21:16:06 +08:00
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 & & target = = DNN_TARGET_MYRIAD )
applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_MYRIAD , CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER ) ;
2019-03-29 21:42:58 +08:00
# endif
if ( target = = DNN_TARGET_OPENCL_FP16 | | target = = DNN_TARGET_OPENCL )
2019-06-15 20:17:25 +08:00
{
if ( backend = = DNN_BACKEND_OPENCV )
applyTestTag ( target = = DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_OPENCL : CV_TEST_TAG_DNN_SKIP_OPENCL_FP16 ) ;
2019-03-29 21:42:58 +08:00
throw SkipTestException ( " Test is disabled for OpenCL targets " ) ;
2019-06-15 20:17:25 +08:00
}
2018-09-11 02:07:51 +08:00
testONNXModels ( " resnet101_duc_hdc " , pb ) ;
}
TEST_P ( Test_ONNX_nets , TinyYolov2 )
{
2018-10-09 06:38:06 +08:00
applyTestTag ( CV_TEST_TAG_MEMORY_512MB ) ;
2019-03-29 21:42:58 +08:00
if ( cvtest : : skipUnstableTests )
throw SkipTestException ( " Skip unstable test " ) ;
# if defined(INF_ENGINE_RELEASE)
2019-12-02 21:16:06 +08:00
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019
2019-03-29 21:42:58 +08:00
& & ( target = = DNN_TARGET_OPENCL | | target = = DNN_TARGET_OPENCL_FP16 )
)
2019-12-02 21:16:06 +08:00
applyTestTag ( target = = DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_IE_OPENCL : CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16 , CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER ) ;
2019-03-29 21:42:58 +08:00
2019-12-24 18:34:33 +08:00
if ( target = = DNN_TARGET_MYRIAD & & getInferenceEngineVPUType ( ) = = CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X
2019-03-29 21:42:58 +08:00
)
2019-12-24 18:34:33 +08:00
applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X ,
backend = = DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 ?
CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER :
CV_TEST_TAG_DNN_SKIP_IE_NGRAPH ) ;
2019-03-29 21:42:58 +08:00
# endif
2018-10-09 06:38:06 +08:00
2018-09-11 02:07:51 +08:00
// output range: [-11; 8]
2019-12-20 21:36:32 +08:00
double l1 = default_l1 , lInf = default_lInf ;
Merge pull request #22275 from zihaomu:fp16_support_conv (DNN: FP16 support on Convolution 2D)
|conv::Conv::(GFLOPS=1.659, K=[3 x 3], IN={1, 960, 10, 10}, OCN=960, PM=SAME, OCV/CPU)|15.418|14.972|1.03|
|conv::Conv::(GFLOPS=1.660, K=[3 x 3], IN={1, 128, 75, 75}, OCN=128, G=128, P=[1 x 1], BIAS, OCV/CPU)|0.430|0.430|1.00|
|conv::Conv::(GFLOPS=1.660, K=[3 x 3], IN={1, 128, 75, 75}, OCN=128, PM=SAME, OCV/CPU)|6.692|6.663|1.00|
|conv::Conv::(GFLOPS=1.675, K=[3 x 3], IN={1, 128, 68, 88}, OCN=128, BIAS, OCV/CPU)|6.350|6.347|1.00|
|conv::Conv::(GFLOPS=1.704, K=[3 x 3], IN={1, 256, 38, 38}, OCN=256, G=256, P=[1 x 1], BIAS, OCV/CPU)|0.267|0.265|1.01|
|conv::Conv::(GFLOPS=1.704, K=[3 x 3], IN={1, 256, 38, 38}, OCN=256, PM=SAME, OCV/CPU)|7.755|7.558|1.03|
|conv::Conv::(GFLOPS=1.704, K=[3 x 3], IN={1, 512, 19, 19}, OCN=512, G=512, P=[1 x 1], BIAS, OCV/CPU)|0.203|0.202|1.00|
|conv::Conv::(GFLOPS=1.704, K=[3 x 3], IN={1, 512, 19, 19}, OCN=512, P=[1 x 1], BIAS, OCV/CPU)|10.663|10.576|1.01|
|conv::Conv::(GFLOPS=1.704, K=[3 x 3], IN={1, 512, 19, 19}, OCN=512, PM=SAME, OCV/CPU)|10.827|10.614|1.02|
|conv::Conv::(GFLOPS=1.766, K=[3 x 3], IN={1, 128, 70, 90}, OCN=128, BIAS, OCV/CPU)|7.049|6.947|1.01|
|conv::Conv::(GFLOPS=1.859, K=[3 x 3], IN={1, 128, 72, 92}, OCN=128, BIAS, OCV/CPU)|6.900|6.901|1.00|
|conv::Conv::(GFLOPS=1.888, K=[3 x 3], IN={1, 1024, 10, 10}, OCN=1024, G=1024, P=[1 x 1], BIAS, OCV/CPU)|0.165|0.165|1.00|
|conv::Conv::(GFLOPS=1.888, K=[3 x 3], IN={1, 1024, 10, 10}, OCN=1024, PM=SAME, OCV/CPU)|17.953|17.251|1.04|
|conv::Conv::(GFLOPS=1.954, K=[3 x 3], IN={1, 128, 74, 94}, OCN=128, BIAS, OCV/CPU)|7.430|7.320|1.01|
|conv::Conv::(GFLOPS=1.995, K=[9 x 9], IN={1, 3, 320, 400}, OCN=32, P=[4 x 4], BIAS, OCV/CPU)|22.187|21.705|1.02|
|conv::Conv::(GFLOPS=2.052, K=[3 x 3], IN={1, 128, 76, 96}, OCN=128, BIAS, OCV/CPU)|8.349|8.126|1.03|
|conv::Conv::(GFLOPS=2.100, K=[3 x 3], IN={1, 144, 75, 75}, OCN=144, PM=SAME, OCV/CPU)|8.273|8.297|1.00|
|conv::Conv::(GFLOPS=2.153, K=[3 x 3], IN={1, 128, 78, 98}, OCN=128, BIAS, OCV/CPU)|8.169|8.094|1.01|
|conv::Conv::(GFLOPS=2.156, K=[3 x 3], IN={1, 576, 19, 19}, OCN=576, PM=SAME, OCV/CPU)|13.602|13.359|1.02|
|conv::Conv::(GFLOPS=2.255, K=[3 x 3], IN={1, 128, 80, 100}, OCN=128, BIAS, OCV/CPU)|8.633|8.584|1.01|
|conv::Conv::(GFLOPS=2.719, K=[3 x 3], IN={1, 96, 256, 256}, OCN=96, S=[2 x 2], PM=SAME, OCV/CPU)|29.339|28.897|1.02|
|conv::Conv::(GFLOPS=3.319, K=[3 x 3], IN={1, 128, 75, 75}, OCN=256, P=[1 x 1], BIAS, OCV/CPU)|13.000|12.920|1.01|
|conv::Conv::(GFLOPS=3.321, K=[3 x 3], IN={1, 64, 150, 150}, OCN=128, P=[1 x 1], BIAS, OCV/CPU)|14.262|13.319|1.07|
|conv::Conv::(GFLOPS=3.398, K=[7 x 7], IN={1, 128, 46, 46}, OCN=128, P=[3 x 3], BIAS, OCV/CPU)|27.453|27.253|1.01|
|conv::Conv::(GFLOPS=3.407, K=[3 x 3], IN={1, 512, 19, 19}, OCN=1024, D=[6 x 6], P=[6 x 6], BIAS, OCV/CPU)|32.052|27.269|1.18|
|conv::Conv::(GFLOPS=3.408, K=[3 x 3], IN={1, 256, 38, 38}, OCN=512, P=[1 x 1], BIAS, OCV/CPU)|15.363|15.208|1.01|
|conv::Conv::(GFLOPS=4.247, K=[3 x 3], IN={1, 480, 32, 32}, OCN=480, PM=SAME, OCV/CPU)|18.543|18.434|1.01|
|conv::Conv::(GFLOPS=4.247, K=[5 x 5], IN={1, 144, 128, 128}, OCN=144, S=[2 x 2], PM=SAME, OCV/CPU)|39.114|37.954|1.03|
|conv::Conv::(GFLOPS=4.566, K=[7 x 7], IN={1, 172, 46, 46}, OCN=128, P=[3 x 3], BIAS, OCV/CPU)|36.271|36.972|0.98|
|conv::Conv::(GFLOPS=4.993, K=[3 x 3], IN={1, 256, 46, 46}, OCN=512, P=[1 x 1], BIAS, OCV/CPU)|19.262|19.427|0.99|
|conv::Conv::(GFLOPS=4.993, K=[3 x 3], IN={1, 512, 46, 46}, OCN=256, P=[1 x 1], BIAS, OCV/CPU)|19.298|19.349|1.00|
|conv::Conv::(GFLOPS=4.994, K=[3 x 3], IN={1, 128, 92, 92}, OCN=256, P=[1 x 1], BIAS, OCV/CPU)|20.261|19.847|1.02|
|conv::Conv::(GFLOPS=4.997, K=[3 x 3], IN={1, 64, 184, 184}, OCN=128, P=[1 x 1], BIAS, OCV/CPU)|21.867|21.525|1.02|
|conv::Conv::(GFLOPS=5.780, K=[5 x 5], IN={1, 672, 32, 32}, OCN=672, S=[2 x 2], PM=SAME, OCV/CPU)|51.756|49.979|1.04|
|conv::Conv::(GFLOPS=6.116, K=[3 x 3], IN={1, 1152, 16, 16}, OCN=1152, PM=SAME, OCV/CPU)|28.133|27.060|1.04|
|conv::Conv::(GFLOPS=6.118, K=[3 x 3], IN={1, 144, 128, 128}, OCN=144, PM=SAME, OCV/CPU)|25.035|24.980|1.00|
|conv::Conv::(GFLOPS=6.637, K=[3 x 3], IN={1, 256, 75, 75}, OCN=256, P=[1 x 1], BIAS, OCV/CPU)|25.858|25.821|1.00|
|conv::Conv::(GFLOPS=6.638, K=[3 x 3], IN={1, 128, 150, 150}, OCN=128, P=[1 x 1], BIAS, OCV/CPU)|27.313|27.149|1.01|
|conv::Conv::(GFLOPS=6.641, K=[3 x 3], IN={1, 64, 150, 200}, OCN=192, PM=SAME, BIAS, OCV/CPU)|28.219|28.111|1.00|
|conv::Conv::(GFLOPS=6.641, K=[3 x 3], IN={1, 64, 300, 300}, OCN=64, P=[1 x 1], BIAS, OCV/CPU)|46.025|46.674|0.99|
|conv::Conv::(GFLOPS=6.814, K=[3 x 3], IN={1, 512, 38, 38}, OCN=512, P=[1 x 1], BIAS, OCV/CPU)|30.220|29.446|1.03|
|conv::Conv::(GFLOPS=8.025, K=[3 x 3], IN={1, 1024, 19, 19}, OCN=1206, P=[1 x 1], BIAS, OCV/CPU)|49.410|48.708|1.01|
|conv::Conv::(GFLOPS=9.986, K=[3 x 3], IN={1, 512, 46, 46}, OCN=512, P=[1 x 1], BIAS, OCV/CPU)|38.203|38.001|1.01|
|conv::Conv::(GFLOPS=9.987, K=[3 x 3], IN={1, 256, 92, 92}, OCN=256, P=[1 x 1], BIAS, OCV/CPU)|39.961|39.021|1.02|
|conv::Conv::(GFLOPS=9.989, K=[3 x 3], IN={1, 128, 184, 184}, OCN=128, P=[1 x 1], BIAS, OCV/CPU)|48.685|47.075|1.03|
|conv::Conv::(GFLOPS=9.993, K=[3 x 3], IN={1, 64, 368, 368}, OCN=64, P=[1 x 1], BIAS, OCV/CPU)|75.114|72.586|1.03|
|conv::Conv::(GFLOPS=10.087, K=[3 x 3], IN={1, 576, 38, 50}, OCN=512, PM=SAME, BIAS, OCV/CPU)|41.222|41.144|1.00|
|conv::Conv::(GFLOPS=10.701, K=[3 x 3], IN={1, 512, 38, 38}, OCN=804, P=[1 x 1], BIAS, OCV/CPU)|46.220|46.353|1.00|
|conv::Conv::(GFLOPS=11.797, K=[5 x 5], IN={1, 240, 64, 64}, OCN=240, PM=SAME, OCV/CPU)|98.201|98.771|0.99|
|conv::Conv::(GFLOPS=11.797, K=[5 x 5], IN={1, 480, 32, 32}, OCN=480, PM=SAME, OCV/CPU)|100.106|96.971|1.03|
|conv::Conv::(GFLOPS=16.987, K=[5 x 5], IN={1, 1152, 16, 16}, OCN=1152, PM=SAME, OCV/CPU)|146.977|140.445|1.05|
|conv::Conv::(GFLOPS=23.122, K=[5 x 5], IN={1, 672, 32, 32}, OCN=672, PM=SAME, OCV/CPU)|198.618|194.665|1.02|
#### Performance Test of ARM platform: Apple M1, with `-perf_threads=1`
Min (ms)
|Name of Test|4.x|patch|4.x vs patch (x-factor)|
|---|:-:|:-:|:-:|
|conv1d::Conv1D::(GFLOPS=0.000, K=[3], IN={1, 2, 19}, OCN=2, G=2, S=2, P=(1, 1), BIAS, OCV/CPU)|0.001|0.001|1.07|
|conv1d::Conv1D::(GFLOPS=0.000, K=[3], IN={1, 2, 25}, OCN=2, G=2, P=(2, 2), PM=SAME, OCV/CPU)|0.001|0.001|1.10|
|conv1d::Conv1D::(GFLOPS=0.000, K=[3], IN={1, 6, 10}, OCN=6, PM=VALID, BIAS, OCV/CPU)|0.002|0.002|0.97|
|conv3d::Conv3D::(GFLOPS=0.000, K=[1 x 1 x 1], IN={1, 4, 9, 10, 10}, OCN=4, S=[1 x 1 x 2], P=(1, 1) x (1, 1) x (1, 1), PM=VALID, OCV/CPU)|0.003|0.003|0.84|
|conv3d::Conv3D::(GFLOPS=0.000, K=[1 x 1 x 1], IN={1, 8, 1, 10, 10}, OCN=8, G=8, P=(1, 1) x (1, 1) x (1, 1), BIAS, OCV/CPU)|0.009|0.009|1.00|
|conv3d::Conv3D::(GFLOPS=0.000, K=[3 x 3 x 3], IN={1, 2, 19, 19, 19}, OCN=2, G=2, S=[2 x 2 x 2], P=(1, 1) x (1, 1) x (1, 1), BIAS, OCV/CPU)|0.027|0.030|0.90|
|conv3d::Conv3D::(GFLOPS=0.000, K=[3 x 4 x 2], IN={1, 4, 8, 10, 10}, OCN=4, G=4, S=[1 x 2 x 1], BIAS, OCV/CPU)|0.008|0.007|1.07|
|conv3d::Conv3D::(GFLOPS=0.001, K=[3 x 3 x 3], IN={1, 2, 25, 19, 19}, OCN=2, G=2, S=[1 x 2 x 2], P=(2, 2) x (2, 2) x (2, 2), PM=SAME, OCV/CPU)|0.066|0.072|0.91|
|conv3d::Conv3D::(GFLOPS=0.002, K=[3 x 1 x 4], IN={1, 14, 5, 10, 10}, OCN=14, PM=SAME, OCV/CPU)|0.090|0.054|1.68|
|conv3d::Conv3D::(GFLOPS=0.006, K=[5 x 5 x 5], IN={1, 4, 50, 19, 19}, OCN=4, S=[2 x 2 x 2], P=(1, 1) x (1, 1) x (1, 1), PM=VALID, OCV/CPU)|0.328|0.409|0.80|
|conv3d::Conv3D::(GFLOPS=0.027, K=[3 x 3 x 3], IN={1, 6, 10, 38, 50}, OCN=6, PM=VALID, BIAS, OCV/CPU)|0.659|0.697|0.95|
|conv3d::Conv3D::(GFLOPS=0.030, K=[5 x 5 x 5], IN={1, 6, 19, 19, 19}, OCN=6, G=2, OCV/CPU)|1.266|1.403|0.90|
|conv3d::Conv3D::(GFLOPS=0.045, K=[7 x 7 x 7], IN={1, 2, 38, 38, 38}, OCN=2, S=[1 x 2 x 1], OCV/CPU)|3.550|4.145|0.86|
|conv3d::Conv3D::(GFLOPS=0.053, K=[3 x 3 x 3], IN={1, 10, 98, 10, 10}, OCN=10, PM=SAME, OCV/CPU)|1.188|1.375|0.86|
|conv3d::Conv3D::(GFLOPS=0.071, K=[7 x 7 x 7], IN={1, 6, 15, 19, 19}, OCN=6, S=[2 x 1 x 1], P=(3, 3) x (3, 3) x (3, 3), PM=SAME, BIAS, OCV/CPU)|2.683|3.236|0.83|
|conv3d::Conv3D::(GFLOPS=0.093, K=[5 x 5 x 5], IN={1, 4, 40, 75, 75}, OCN=4, S=[2 x 2 x 2], OCV/CPU)|4.491|5.501|0.82|
|conv3d::Conv3D::(GFLOPS=0.116, K=[5 x 5 x 5], IN={1, 2, 21, 75, 100}, OCN=2, BIAS, OCV/CPU)|8.916|10.181|0.88|
|conv3d::Conv3D::(GFLOPS=1.267, K=[5 x 5 x 5], IN={1, 3, 75, 75, 100}, OCN=3, PM=SAME, BIAS, OCV/CPU)|69.995|72.296|0.97|
|conv3d::Conv3D::(GFLOPS=1.343, K=[3 x 3 x 3], IN={1, 11, 9, 150, 200}, OCN=11, PM=VALID, BIAS, OCV/CPU)|22.531|23.139|0.97|
|conv::Conv::(GFLOPS=0.177, K=[1 x 1], IN={1, 512, 26, 26}, OCN=256, OCV/CPU)|2.239|1.933|1.16|
|conv::Conv::(GFLOPS=0.177, K=[1 x 1], IN={1, 512, 26, 26}, OCN=256, OCV/CPU_FP16)|-|1.010|-|
|conv::Conv::(GFLOPS=0.177, K=[1 x 1], IN={1, 1024, 13, 13}, OCN=512, OCV/CPU)|3.134|2.068|1.52|
|conv::Conv::(GFLOPS=0.177, K=[1 x 1], IN={1, 1024, 13, 13}, OCN=512, OCV/CPU_FP16)|-|1.062|-|
|conv::Conv::(GFLOPS=0.178, K=[1 x 1], IN={1, 256, 52, 52}, OCN=128, OCV/CPU)|1.918|1.920|1.00|
|conv::Conv::(GFLOPS=0.178, K=[1 x 1], IN={1, 256, 52, 52}, OCN=128, OCV/CPU_FP16)|-|1.014|-|
|conv::Conv::(GFLOPS=0.210, K=[1 x 1], IN={1, 576, 38, 50}, OCN=96, PM=SAME, BIAS, OCV/CPU)|2.340|2.352|0.99|
|conv::Conv::(GFLOPS=0.210, K=[1 x 1], IN={1, 576, 38, 50}, OCN=96, PM=SAME, BIAS, OCV/CPU_FP16)|-|1.247|-|
|conv::Conv::(GFLOPS=0.231, K=[3 x 3], IN={1, 128, 56, 56}, OCN=32, P=[1 x 1], OCV/CPU)|1.116|1.111|1.00|
|conv::Conv::(GFLOPS=0.231, K=[3 x 3], IN={1, 128, 56, 56}, OCN=32, P=[1 x 1], OCV/CPU_FP16)|-|1.114|-|
|conv::Conv::(GFLOPS=0.231, K=[3 x 3], IN={1, 256, 14, 14}, OCN=256, P=[1 x 1], OCV/CPU)|1.116|1.112|1.00|
|conv::Conv::(GFLOPS=0.231, K=[3 x 3], IN={1, 256, 14, 14}, OCN=256, P=[1 x 1], OCV/CPU_FP16)|-|1.113|-|
|conv::Conv::(GFLOPS=0.280, K=[1 x 1], IN={1, 576, 38, 50}, OCN=128, PM=SAME, BIAS, OCV/CPU)|3.067|3.085|0.99|
|conv::Conv::(GFLOPS=0.280, K=[1 x 1], IN={1, 576, 38, 50}, OCN=128, PM=SAME, BIAS, OCV/CPU_FP16)|-|1.622|-|
|conv::Conv::(GFLOPS=0.302, K=[3 x 3], IN={1, 64, 64, 64}, OCN=64, PM=SAME, OCV/CPU)|1.153|1.187|0.97|
|conv::Conv::(GFLOPS=0.302, K=[3 x 3], IN={1, 64, 64, 64}, OCN=64, PM=SAME, OCV/CPU_FP16)|-|1.150|-|
|conv::Conv::(GFLOPS=0.357, K=[1 x 1], IN={1, 64, 208, 208}, OCN=64, OCV/CPU)|4.804|4.849|0.99|
|conv::Conv::(GFLOPS=0.357, K=[1 x 1], IN={1, 64, 208, 208}, OCN=64, OCV/CPU_FP16)|-|2.922|-|
|conv::Conv::(GFLOPS=0.420, K=[3 x 3], IN={1, 96, 38, 50}, OCN=128, PM=SAME, BIAS, OCV/CPU)|1.463|1.469|1.00|
|conv::Conv::(GFLOPS=0.420, K=[3 x 3], IN={1, 96, 38, 50}, OCN=128, PM=SAME, BIAS, OCV/CPU_FP16)|-|1.459|-|
|conv::Conv::(GFLOPS=0.472, K=[3 x 3], IN={1, 128, 40, 40}, OCN=128, PM=SAME, OCV/CPU)|1.577|1.580|1.00|
|conv::Conv::(GFLOPS=0.472, K=[3 x 3], IN={1, 128, 40, 40}, OCN=128, PM=SAME, OCV/CPU_FP16)|-|1.580|-|
|conv::Conv::(GFLOPS=0.472, K=[3 x 3], IN={1, 256, 20, 20}, OCN=256, PM=SAME, OCV/CPU)|1.826|1.818|1.00|
|conv::Conv::(GFLOPS=0.472, K=[3 x 3], IN={1, 256, 20, 20}, OCN=256, PM=SAME, OCV/CPU_FP16)|-|1.817|-|
|conv::Conv::(GFLOPS=0.472, K=[3 x 3], IN={1, 512, 10, 10}, OCN=512, PM=SAME, OCV/CPU)|6.541|5.081|1.29|
|conv::Conv::(GFLOPS=0.472, K=[3 x 3], IN={1, 512, 10, 10}, OCN=512, PM=SAME, OCV/CPU_FP16)|-|2.809|-|
|conv::Conv::(GFLOPS=0.561, K=[3 x 3], IN={1, 128, 38, 50}, OCN=128, PM=SAME, BIAS, OCV/CPU)|1.912|1.919|1.00|
|conv::Conv::(GFLOPS=0.561, K=[3 x 3], IN={1, 128, 38, 50}, OCN=128, PM=SAME, BIAS, OCV/CPU_FP16)|-|1.919|-|
|conv::Conv::(GFLOPS=0.624, K=[3 x 3], IN={1, 128, 46, 46}, OCN=128, P=[1 x 1], BIAS, OCV/CPU)|1.961|1.971|0.99|
|conv::Conv::(GFLOPS=0.624, K=[3 x 3], IN={1, 128, 46, 46}, OCN=128, P=[1 x 1], BIAS, OCV/CPU_FP16)|-|1.961|-|
|conv::Conv::(GFLOPS=0.701, K=[3 x 3], IN={1, 128, 38, 50}, OCN=160, PM=SAME, BIAS, OCV/CPU)|2.317|2.329|0.99|
|conv::Conv::(GFLOPS=0.701, K=[3 x 3], IN={1, 128, 38, 50}, OCN=160, PM=SAME, BIAS, OCV/CPU_FP16)|-|2.322|-|
|conv::Conv::(GFLOPS=0.798, K=[3 x 3], IN={1, 64, 104, 104}, OCN=64, P=[1 x 1], OCV/CPU)|2.920|2.947|0.99|
|conv::Conv::(GFLOPS=0.798, K=[3 x 3], IN={1, 64, 104, 104}, OCN=64, P=[1 x 1], OCV/CPU_FP16)|-|2.924|-|
|conv::Conv::(GFLOPS=0.798, K=[3 x 3], IN={1, 128, 52, 52}, OCN=128, P=[1 x 1], OCV/CPU)|2.467|2.466|1.00|
|conv::Conv::(GFLOPS=0.798, K=[3 x 3], IN={1, 128, 52, 52}, OCN=128, P=[1 x 1], OCV/CPU_FP16)|-|2.496|-|
|conv::Conv::(GFLOPS=0.798, K=[3 x 3], IN={1, 256, 26, 26}, OCN=256, P=[1 x 1], OCV/CPU)|3.028|2.997|1.01|
|conv::Conv::(GFLOPS=0.798, K=[3 x 3], IN={1, 256, 26, 26}, OCN=256, P=[1 x 1], OCV/CPU_FP16)|-|2.986|-|
|conv::Conv::(GFLOPS=0.798, K=[3 x 3], IN={1, 512, 13, 13}, OCN=512, P=[1 x 1], OCV/CPU)|4.353|4.355|1.00|
|conv::Conv::(GFLOPS=0.798, K=[3 x 3], IN={1, 512, 13, 13}, OCN=512, P=[1 x 1], OCV/CPU_FP16)|-|4.355|-|
|conv::Conv::(GFLOPS=0.830, K=[3 x 3], IN={1, 64, 75, 100}, OCN=96, PM=SAME, BIAS, OCV/CPU)|2.762|2.793|0.99|
|conv::Conv::(GFLOPS=0.830, K=[3 x 3], IN={1, 64, 75, 100}, OCN=96, PM=SAME, BIAS, OCV/CPU_FP16)|-|2.797|-|
|conv::Conv::(GFLOPS=0.958, K=[3 x 3], IN={1, 192, 38, 38}, OCN=192, PM=SAME, OCV/CPU)|3.428|3.226|1.06|
|conv::Conv::(GFLOPS=0.958, K=[3 x 3], IN={1, 192, 38, 38}, OCN=192, PM=SAME, OCV/CPU_FP16)|-|3.223|-|
|conv::Conv::(GFLOPS=0.958, K=[3 x 3], IN={1, 384, 19, 19}, OCN=384, PM=SAME, OCV/CPU)|3.967|3.957|1.00|
|conv::Conv::(GFLOPS=0.958, K=[3 x 3], IN={1, 384, 19, 19}, OCN=384, PM=SAME, OCV/CPU_FP16)|-|3.960|-|
|conv::Conv::(GFLOPS=1.022, K=[3 x 3], IN={1, 576, 19, 19}, OCN=273, PM=SAME, BIAS, OCV/CPU)|4.806|4.387|1.10|
|conv::Conv::(GFLOPS=1.022, K=[3 x 3], IN={1, 576, 19, 19}, OCN=273, PM=SAME, BIAS, OCV/CPU_FP16)|-|4.366|-|
|conv::Conv::(GFLOPS=1.112, K=[3 x 3], IN={1, 512, 10, 10}, OCN=1206, P=[1 x 1], BIAS, OCV/CPU)|14.509|11.756|1.23|
|conv::Conv::(GFLOPS=1.112, K=[3 x 3], IN={1, 512, 10, 10}, OCN=1206, P=[1 x 1], BIAS, OCV/CPU_FP16)|-|6.510|-|
|conv::Conv::(GFLOPS=1.181, K=[3 x 3], IN={1, 64, 160, 200}, OCN=128, S=[2 x 2], P=[1 x 1], BIAS, OCV/CPU)|13.718|13.287|1.03|
|conv::Conv::(GFLOPS=1.181, K=[3 x 3], IN={1, 64, 160, 200}, OCN=128, S=[2 x 2], P=[1 x 1], BIAS, OCV/CPU_FP16)|-|7.190|-|
|conv::Conv::(GFLOPS=1.182, K=[3 x 3], IN={1, 32, 320, 400}, OCN=64, S=[2 x 2], P=[1 x 1], BIAS, OCV/CPU)|15.133|14.853|1.02|
|conv::Conv::(GFLOPS=1.182, K=[3 x 3], IN={1, 32, 320, 400}, OCN=64, S=[2 x 2], P=[1 x 1], BIAS, OCV/CPU_FP16)|-|8.671|-|
|conv::Conv::(GFLOPS=1.195, K=[9 x 9], IN={1, 32, 240, 320}, OCN=3, P=[4 x 4], BIAS, OCV/CPU)|41.928|43.328|0.97|
|conv::Conv::(GFLOPS=1.195, K=[9 x 9], IN={1, 32, 240, 320}, OCN=3, P=[4 x 4], BIAS, OCV/CPU_FP16)|-|38.072|-|
|conv::Conv::(GFLOPS=1.196, K=[3 x 3], IN={1, 384, 26, 26}, OCN=256, P=[1 x 1], OCV/CPU)|4.409|4.428|1.00|
|conv::Conv::(GFLOPS=1.196, K=[3 x 3], IN={1, 384, 26, 26}, OCN=256, P=[1 x 1], OCV/CPU_FP16)|-|4.427|-|
|conv::Conv::(GFLOPS=1.210, K=[3 x 3], IN={1, 32, 256, 256}, OCN=32, PM=SAME, OCV/CPU)|6.144|5.363|1.15|
|conv::Conv::(GFLOPS=1.210, K=[3 x 3], IN={1, 32, 256, 256}, OCN=32, PM=SAME, OCV/CPU_FP16)|-|5.368|-|
|conv::Conv::(GFLOPS=1.245, K=[3 x 3], IN={1, 64, 75, 75}, OCN=192, PM=SAME, BIAS, OCV/CPU)|3.926|3.932|1.00|
|conv::Conv::(GFLOPS=1.245, K=[3 x 3], IN={1, 64, 75, 75}, OCN=192, PM=SAME, BIAS, OCV/CPU_FP16)|-|3.938|-|
|conv::Conv::(GFLOPS=1.245, K=[3 x 3], IN={1, 96, 75, 100}, OCN=96, PM=SAME, BIAS, OCV/CPU)|3.920|3.915|1.00|
|conv::Conv::(GFLOPS=1.245, K=[3 x 3], IN={1, 96, 75, 100}, OCN=96, PM=SAME, BIAS, OCV/CPU_FP16)|-|3.950|-|
|conv::Conv::(GFLOPS=1.248, K=[3 x 3], IN={1, 256, 46, 46}, OCN=128, P=[1 x 1], BIAS, OCV/CPU)|3.767|3.764|1.00|
|conv::Conv::(GFLOPS=1.248, K=[3 x 3], IN={1, 256, 46, 46}, OCN=128, P=[1 x 1], BIAS, OCV/CPU_FP16)|-|3.762|-|
|conv::Conv::(GFLOPS=1.258, K=[3 x 3], IN={1, 1280, 10, 10}, OCN=546, PM=SAME, BIAS, OCV/CPU)|19.959|13.875|1.44|
|conv::Conv::(GFLOPS=1.258, K=[3 x 3], IN={1, 1280, 10, 10}, OCN=546, PM=SAME, BIAS, OCV/CPU_FP16)|-|7.781|-|
|conv::Conv::(GFLOPS=1.261, K=[3 x 3], IN={1, 192, 38, 50}, OCN=192, PM=SAME, BIAS, OCV/CPU)|3.951|3.955|1.00|
|conv::Conv::(GFLOPS=1.261, K=[3 x 3], IN={1, 192, 38, 50}, OCN=192, PM=SAME, BIAS, OCV/CPU_FP16)|-|3.969|-|
|conv::Conv::(GFLOPS=1.416, K=[3 x 3], IN={1, 128, 62, 82}, OCN=128, BIAS, OCV/CPU)|4.050|4.034|1.00|
|conv::Conv::(GFLOPS=1.416, K=[3 x 3], IN={1, 128, 62, 82}, OCN=128, BIAS, OCV/CPU_FP16)|-|4.093|-|
|conv::Conv::(GFLOPS=1.500, K=[3 x 3], IN={1, 128, 64, 84}, OCN=128, BIAS, OCV/CPU)|4.923|4.506|1.09|
|conv::Conv::(GFLOPS=1.500, K=[3 x 3], IN={1, 128, 64, 84}, OCN=128, BIAS, OCV/CPU_FP16)|-|4.509|-|
|conv::Conv::(GFLOPS=1.586, K=[3 x 3], IN={1, 128, 66, 86}, OCN=128, BIAS, OCV/CPU)|4.759|4.476|1.06|
|conv::Conv::(GFLOPS=1.586, K=[3 x 3], IN={1, 128, 66, 86}, OCN=128, BIAS, OCV/CPU_FP16)|-|4.447|-|
|conv::Conv::(GFLOPS=1.595, K=[3 x 3], IN={1, 256, 26, 26}, OCN=512, P=[1 x 1], OCV/CPU)|6.079|5.628|1.08|
|conv::Conv::(GFLOPS=1.595, K=[3 x 3], IN={1, 256, 26, 26}, OCN=512, P=[1 x 1], OCV/CPU_FP16)|-|5.625|-|
|conv::Conv::(GFLOPS=1.595, K=[3 x 3], IN={1, 256, 52, 52}, OCN=512, S=[2 x 2], P=[1 x 1], OCV/CPU)|19.843|17.523|1.13|
|conv::Conv::(GFLOPS=1.595, K=[3 x 3], IN={1, 256, 52, 52}, OCN=512, S=[2 x 2], P=[1 x 1], OCV/CPU_FP16)|-|8.917|-|
|conv::Conv::(GFLOPS=1.595, K=[3 x 3], IN={1, 512, 13, 13}, OCN=1024, P=[1 x 1], OCV/CPU)|8.334|8.247|1.01|
|conv::Conv::(GFLOPS=1.595, K=[3 x 3], IN={1, 512, 13, 13}, OCN=1024, P=[1 x 1], OCV/CPU_FP16)|-|8.246|-|
|conv::Conv::(GFLOPS=1.595, K=[3 x 3], IN={1, 512, 26, 26}, OCN=1024, S=[2 x 2], P=[1 x 1], OCV/CPU)|23.164|18.199|1.27|
|conv::Conv::(GFLOPS=1.595, K=[3 x 3], IN={1, 512, 26, 26}, OCN=1024, S=[2 x 2], P=[1 x 1], OCV/CPU_FP16)|-|9.305|-|
|conv::Conv::(GFLOPS=1.596, K=[3 x 3], IN={1, 64, 104, 104}, OCN=128, P=[1 x 1], OCV/CPU)|5.184|5.178|1.00|
|conv::Conv::(GFLOPS=1.596, K=[3 x 3], IN={1, 64, 104, 104}, OCN=128, P=[1 x 1], OCV/CPU_FP16)|-|5.149|-|
|conv::Conv::(GFLOPS=1.596, K=[3 x 3], IN={1, 64, 208, 208}, OCN=128, S=[2 x 2], P=[1 x 1], OCV/CPU)|17.990|18.103|0.99|
|conv::Conv::(GFLOPS=1.596, K=[3 x 3], IN={1, 64, 208, 208}, OCN=128, S=[2 x 2], P=[1 x 1], OCV/CPU_FP16)|-|9.777|-|
|conv::Conv::(GFLOPS=1.596, K=[3 x 3], IN={1, 128, 52, 52}, OCN=256, P=[1 x 1], OCV/CPU)|4.831|4.522|1.07|
|conv::Conv::(GFLOPS=1.596, K=[3 x 3], IN={1, 128, 52, 52}, OCN=256, P=[1 x 1], OCV/CPU_FP16)|-|4.523|-|
|conv::Conv::(GFLOPS=1.596, K=[3 x 3], IN={1, 128, 104, 104}, OCN=256, S=[2 x 2], P=[1 x 1], OCV/CPU)|17.328|17.319|1.00|
|conv::Conv::(GFLOPS=1.596, K=[3 x 3], IN={1, 128, 104, 104}, OCN=256, S=[2 x 2], P=[1 x 1], OCV/CPU_FP16)|-|8.948|-|
|conv::Conv::(GFLOPS=1.598, K=[3 x 3], IN={1, 32, 208, 208}, OCN=64, P=[1 x 1], OCV/CPU)|5.944|5.961|1.00|
|conv::Conv::(GFLOPS=1.598, K=[3 x 3], IN={1, 32, 208, 208}, OCN=64, P=[1 x 1], OCV/CPU_FP16)|-|5.936|-|
|conv::Conv::(GFLOPS=1.598, K=[3 x 3], IN={1, 32, 416, 416}, OCN=64, S=[2 x 2], P=[1 x 1], OCV/CPU)|19.811|20.064|0.99|
|conv::Conv::(GFLOPS=1.598, K=[3 x 3], IN={1, 32, 416, 416}, OCN=64, S=[2 x 2], P=[1 x 1], OCV/CPU_FP16)|-|11.705|-|
|conv::Conv::(GFLOPS=1.659, K=[3 x 3], IN={1, 960, 10, 10}, OCN=960, PM=SAME, OCV/CPU)|22.398|17.686|1.27|
|conv::Conv::(GFLOPS=1.659, K=[3 x 3], IN={1, 960, 10, 10}, OCN=960, PM=SAME, OCV/CPU_FP16)|-|9.859|-|
|conv::Conv::(GFLOPS=1.660, K=[3 x 3], IN={1, 128, 75, 75}, OCN=128, G=128, P=[1 x 1], BIAS, OCV/CPU)|0.416|0.416|1.00|
|conv::Conv::(GFLOPS=1.660, K=[3 x 3], IN={1, 128, 75, 75}, OCN=128, G=128, P=[1 x 1], BIAS, OCV/CPU_FP16)|-|0.417|-|
|conv::Conv::(GFLOPS=1.660, K=[3 x 3], IN={1, 128, 75, 75}, OCN=128, PM=SAME, OCV/CPU)|5.356|5.110|1.05|
|conv::Conv::(GFLOPS=1.660, K=[3 x 3], IN={1, 128, 75, 75}, OCN=128, PM=SAME, OCV/CPU_FP16)|-|5.114|-|
|conv::Conv::(GFLOPS=1.675, K=[3 x 3], IN={1, 128, 68, 88}, OCN=128, BIAS, OCV/CPU)|5.092|4.748|1.07|
|conv::Conv::(GFLOPS=1.675, K=[3 x 3], IN={1, 128, 68, 88}, OCN=128, BIAS, OCV/CPU_FP16)|-|4.754|-|
|conv::Conv::(GFLOPS=1.704, K=[3 x 3], IN={1, 256, 38, 38}, OCN=256, G=256, P=[1 x 1], BIAS, OCV/CPU)|0.260|0.229|1.13|
|conv::Conv::(GFLOPS=1.704, K=[3 x 3], IN={1, 256, 38, 38}, OCN=256, G=256, P=[1 x 1], BIAS, OCV/CPU_FP16)|-|0.229|-|
|conv::Conv::(GFLOPS=1.704, K=[3 x 3], IN={1, 256, 38, 38}, OCN=256, PM=SAME, OCV/CPU)|5.872|5.460|1.08|
|conv::Conv::(GFLOPS=1.704, K=[3 x 3], IN={1, 256, 38, 38}, OCN=256, PM=SAME, OCV/CPU_FP16)|-|5.460|-|
|conv::Conv::(GFLOPS=1.704, K=[3 x 3], IN={1, 512, 19, 19}, OCN=512, G=512, P=[1 x 1], BIAS, OCV/CPU)|0.161|0.161|1.00|
|conv::Conv::(GFLOPS=1.704, K=[3 x 3], IN={1, 512, 19, 19}, OCN=512, G=512, P=[1 x 1], BIAS, OCV/CPU_FP16)|-|0.161|-|
|conv::Conv::(GFLOPS=1.704, K=[3 x 3], IN={1, 512, 19, 19}, OCN=512, P=[1 x 1], BIAS, OCV/CPU)|7.176|7.175|1.00|
|conv::Conv::(GFLOPS=1.704, K=[3 x 3], IN={1, 512, 19, 19}, OCN=512, P=[1 x 1], BIAS, OCV/CPU_FP16)|-|7.162|-|
|conv::Conv::(GFLOPS=1.704, K=[3 x 3], IN={1, 512, 19, 19}, OCN=512, PM=SAME, OCV/CPU)|7.174|7.185|1.00|
|conv::Conv::(GFLOPS=1.704, K=[3 x 3], IN={1, 512, 19, 19}, OCN=512, PM=SAME, OCV/CPU_FP16)|-|7.157|-|
|conv::Conv::(GFLOPS=1.766, K=[3 x 3], IN={1, 128, 70, 90}, OCN=128, BIAS, OCV/CPU)|5.400|5.180|1.04|
|conv::Conv::(GFLOPS=1.766, K=[3 x 3], IN={1, 128, 70, 90}, OCN=128, BIAS, OCV/CPU_FP16)|-|5.201|-|
|conv::Conv::(GFLOPS=1.859, K=[3 x 3], IN={1, 128, 72, 92}, OCN=128, BIAS, OCV/CPU)|5.330|5.188|1.03|
|conv::Conv::(GFLOPS=1.859, K=[3 x 3], IN={1, 128, 72, 92}, OCN=128, BIAS, OCV/CPU_FP16)|-|5.177|-|
|conv::Conv::(GFLOPS=1.888, K=[3 x 3], IN={1, 1024, 10, 10}, OCN=1024, G=1024, P=[1 x 1], BIAS, OCV/CPU)|0.115|0.115|1.00|
|conv::Conv::(GFLOPS=1.888, K=[3 x 3], IN={1, 1024, 10, 10}, OCN=1024, G=1024, P=[1 x 1], BIAS, OCV/CPU_FP16)|-|0.115|-|
|conv::Conv::(GFLOPS=1.888, K=[3 x 3], IN={1, 1024, 10, 10}, OCN=1024, PM=SAME, OCV/CPU)|26.156|20.222|1.29|
|conv::Conv::(GFLOPS=1.888, K=[3 x 3], IN={1, 1024, 10, 10}, OCN=1024, PM=SAME, OCV/CPU_FP16)|-|11.203|-|
|conv::Conv::(GFLOPS=1.954, K=[3 x 3], IN={1, 128, 74, 94}, OCN=128, BIAS, OCV/CPU)|5.627|5.543|1.02|
|conv::Conv::(GFLOPS=1.954, K=[3 x 3], IN={1, 128, 74, 94}, OCN=128, BIAS, OCV/CPU_FP16)|-|5.506|-|
|conv::Conv::(GFLOPS=1.995, K=[9 x 9], IN={1, 3, 320, 400}, OCN=32, P=[4 x 4], BIAS, OCV/CPU)|27.925|27.741|1.01|
|conv::Conv::(GFLOPS=1.995, K=[9 x 9], IN={1, 3, 320, 400}, OCN=32, P=[4 x 4], BIAS, OCV/CPU_FP16)|-|17.217|-|
|conv::Conv::(GFLOPS=2.052, K=[3 x 3], IN={1, 128, 76, 96}, OCN=128, BIAS, OCV/CPU)|6.359|6.062|1.05|
|conv::Conv::(GFLOPS=2.052, K=[3 x 3], IN={1, 128, 76, 96}, OCN=128, BIAS, OCV/CPU_FP16)|-|6.048|-|
|conv::Conv::(GFLOPS=2.100, K=[3 x 3], IN={1, 144, 75, 75}, OCN=144, PM=SAME, OCV/CPU)|6.559|6.322|1.04|
|conv::Conv::(GFLOPS=2.100, K=[3 x 3], IN={1, 144, 75, 75}, OCN=144, PM=SAME, OCV/CPU_FP16)|-|6.280|-|
|conv::Conv::(GFLOPS=2.153, K=[3 x 3], IN={1, 128, 78, 98}, OCN=128, BIAS, OCV/CPU)|6.412|6.200|1.03|
|conv::Conv::(GFLOPS=2.153, K=[3 x 3], IN={1, 128, 78, 98}, OCN=128, BIAS, OCV/CPU_FP16)|-|6.197|-|
|conv::Conv::(GFLOPS=2.156, K=[3 x 3], IN={1, 576, 19, 19}, OCN=576, PM=SAME, OCV/CPU)|9.167|8.624|1.06|
|conv::Conv::(GFLOPS=2.156, K=[3 x 3], IN={1, 576, 19, 19}, OCN=576, PM=SAME, OCV/CPU_FP16)|-|8.626|-|
|conv::Conv::(GFLOPS=2.255, K=[3 x 3], IN={1, 128, 80, 100}, OCN=128, BIAS, OCV/CPU)|6.755|6.491|1.04|
|conv::Conv::(GFLOPS=2.255, K=[3 x 3], IN={1, 128, 80, 100}, OCN=128, BIAS, OCV/CPU_FP16)|-|6.520|-|
|conv::Conv::(GFLOPS=2.719, K=[3 x 3], IN={1, 96, 256, 256}, OCN=96, S=[2 x 2], PM=SAME, OCV/CPU)|35.664|34.752|1.03|
|conv::Conv::(GFLOPS=2.719, K=[3 x 3], IN={1, 96, 256, 256}, OCN=96, S=[2 x 2], PM=SAME, OCV/CPU_FP16)|-|20.260|-|
|conv::Conv::(GFLOPS=3.319, K=[3 x 3], IN={1, 128, 75, 75}, OCN=256, P=[1 x 1], BIAS, OCV/CPU)|9.514|9.414|1.01|
|conv::Conv::(GFLOPS=3.319, K=[3 x 3], IN={1, 128, 75, 75}, OCN=256, P=[1 x 1], BIAS, OCV/CPU_FP16)|-|9.462|-|
|conv::Conv::(GFLOPS=3.321, K=[3 x 3], IN={1, 64, 150, 150}, OCN=128, P=[1 x 1], BIAS, OCV/CPU)|10.631|9.963|1.07|
|conv::Conv::(GFLOPS=3.321, K=[3 x 3], IN={1, 64, 150, 150}, OCN=128, P=[1 x 1], BIAS, OCV/CPU_FP16)|-|9.935|-|
|conv::Conv::(GFLOPS=3.398, K=[7 x 7], IN={1, 128, 46, 46}, OCN=128, P=[3 x 3], BIAS, OCV/CPU)|37.465|36.798|1.02|
|conv::Conv::(GFLOPS=3.398, K=[7 x 7], IN={1, 128, 46, 46}, OCN=128, P=[3 x 3], BIAS, OCV/CPU_FP16)|-|19.569|-|
|conv::Conv::(GFLOPS=3.407, K=[3 x 3], IN={1, 512, 19, 19}, OCN=1024, D=[6 x 6], P=[6 x 6], BIAS, OCV/CPU)|38.157|36.157|1.06|
|conv::Conv::(GFLOPS=3.407, K=[3 x 3], IN={1, 512, 19, 19}, OCN=1024, D=[6 x 6], P=[6 x 6], BIAS, OCV/CPU_FP16)|-|18.902|-|
|conv::Conv::(GFLOPS=3.408, K=[3 x 3], IN={1, 256, 38, 38}, OCN=512, P=[1 x 1], BIAS, OCV/CPU)|10.356|10.401|1.00|
|conv::Conv::(GFLOPS=3.408, K=[3 x 3], IN={1, 256, 38, 38}, OCN=512, P=[1 x 1], BIAS, OCV/CPU_FP16)|-|10.360|-|
|conv::Conv::(GFLOPS=4.247, K=[3 x 3], IN={1, 480, 32, 32}, OCN=480, PM=SAME, OCV/CPU)|12.641|12.150|1.04|
|conv::Conv::(GFLOPS=4.247, K=[3 x 3], IN={1, 480, 32, 32}, OCN=480, PM=SAME, OCV/CPU_FP16)|-|12.162|-|
|conv::Conv::(GFLOPS=4.247, K=[5 x 5], IN={1, 144, 128, 128}, OCN=144, S=[2 x 2], PM=SAME, OCV/CPU)|50.545|50.505|1.00|
|conv::Conv::(GFLOPS=4.247, K=[5 x 5], IN={1, 144, 128, 128}, OCN=144, S=[2 x 2], PM=SAME, OCV/CPU_FP16)|-|27.950|-|
|conv::Conv::(GFLOPS=4.566, K=[7 x 7], IN={1, 172, 46, 46}, OCN=128, P=[3 x 3], BIAS, OCV/CPU)|54.233|49.603|1.09|
|conv::Conv::(GFLOPS=4.566, K=[7 x 7], IN={1, 172, 46, 46}, OCN=128, P=[3 x 3], BIAS, OCV/CPU_FP16)|-|26.515|-|
|conv::Conv::(GFLOPS=4.993, K=[3 x 3], IN={1, 256, 46, 46}, OCN=512, P=[1 x 1], BIAS, OCV/CPU)|13.779|12.968|1.06|
|conv::Conv::(GFLOPS=4.993, K=[3 x 3], IN={1, 256, 46, 46}, OCN=512, P=[1 x 1], BIAS, OCV/CPU_FP16)|-|12.984|-|
|conv::Conv::(GFLOPS=4.993, K=[3 x 3], IN={1, 512, 46, 46}, OCN=256, P=[1 x 1], BIAS, OCV/CPU)|15.809|15.329|1.03|
|conv::Conv::(GFLOPS=4.993, K=[3 x 3], IN={1, 512, 46, 46}, OCN=256, P=[1 x 1], BIAS, OCV/CPU_FP16)|-|15.433|-|
|conv::Conv::(GFLOPS=4.994, K=[3 x 3], IN={1, 128, 92, 92}, OCN=256, P=[1 x 1], BIAS, OCV/CPU)|14.563|14.527|1.00|
|conv::Conv::(GFLOPS=4.994, K=[3 x 3], IN={1, 128, 92, 92}, OCN=256, P=[1 x 1], BIAS, OCV/CPU_FP16)|-|14.480|-|
|conv::Conv::(GFLOPS=4.997, K=[3 x 3], IN={1, 64, 184, 184}, OCN=128, P=[1 x 1], BIAS, OCV/CPU)|16.714|16.484|1.01|
|conv::Conv::(GFLOPS=4.997, K=[3 x 3], IN={1, 64, 184, 184}, OCN=128, P=[1 x 1], BIAS, OCV/CPU_FP16)|-|16.362|-|
|conv::Conv::(GFLOPS=5.780, K=[5 x 5], IN={1, 672, 32, 32}, OCN=672, S=[2 x 2], PM=SAME, OCV/CPU)|77.832|65.729|1.18|
|conv::Conv::(GFLOPS=5.780, K=[5 x 5], IN={1, 672, 32, 32}, OCN=672, S=[2 x 2], PM=SAME, OCV/CPU_FP16)|-|32.065|-|
|conv::Conv::(GFLOPS=6.116, K=[3 x 3], IN={1, 1152, 16, 16}, OCN=1152, PM=SAME, OCV/CPU)|21.903|20.386|1.07|
|conv::Conv::(GFLOPS=6.116, K=[3 x 3], IN={1, 1152, 16, 16}, OCN=1152, PM=SAME, OCV/CPU_FP16)|-|20.416|-|
|conv::Conv::(GFLOPS=6.118, K=[3 x 3], IN={1, 144, 128, 128}, OCN=144, PM=SAME, OCV/CPU)|20.405|18.148|1.12|
|conv::Conv::(GFLOPS=6.118, K=[3 x 3], IN={1, 144, 128, 128}, OCN=144, PM=SAME, OCV/CPU_FP16)|-|18.128|-|
|conv::Conv::(GFLOPS=6.637, K=[3 x 3], IN={1, 256, 75, 75}, OCN=256, P=[1 x 1], BIAS, OCV/CPU)|20.334|18.521|1.10|
|conv::Conv::(GFLOPS=6.637, K=[3 x 3], IN={1, 256, 75, 75}, OCN=256, P=[1 x 1], BIAS, OCV/CPU_FP16)|-|18.495|-|
|conv::Conv::(GFLOPS=6.638, K=[3 x 3], IN={1, 128, 150, 150}, OCN=128, P=[1 x 1], BIAS, OCV/CPU)|21.527|19.584|1.10|
|conv::Conv::(GFLOPS=6.638, K=[3 x 3], IN={1, 128, 150, 150}, OCN=128, P=[1 x 1], BIAS, OCV/CPU_FP16)|-|19.630|-|
|conv::Conv::(GFLOPS=6.641, K=[3 x 3], IN={1, 64, 150, 200}, OCN=192, PM=SAME, BIAS, OCV/CPU)|22.715|20.057|1.13|
|conv::Conv::(GFLOPS=6.641, K=[3 x 3], IN={1, 64, 150, 200}, OCN=192, PM=SAME, BIAS, OCV/CPU_FP16)|-|20.068|-|
|conv::Conv::(GFLOPS=6.641, K=[3 x 3], IN={1, 64, 300, 300}, OCN=64, P=[1 x 1], BIAS, OCV/CPU)|26.228|24.992|1.05|
|conv::Conv::(GFLOPS=6.641, K=[3 x 3], IN={1, 64, 300, 300}, OCN=64, P=[1 x 1], BIAS, OCV/CPU_FP16)|-|24.957|-|
|conv::Conv::(GFLOPS=6.814, K=[3 x 3], IN={1, 512, 38, 38}, OCN=512, P=[1 x 1], BIAS, OCV/CPU)|21.524|21.581|1.00|
|conv::Conv::(GFLOPS=6.814, K=[3 x 3], IN={1, 512, 38, 38}, OCN=512, P=[1 x 1], BIAS, OCV/CPU_FP16)|-|21.782|-|
|conv::Conv::(GFLOPS=8.025, K=[3 x 3], IN={1, 1024, 19, 19}, OCN=1206, P=[1 x 1], BIAS, OCV/CPU)|34.094|31.964|1.07|
|conv::Conv::(GFLOPS=8.025, K=[3 x 3], IN={1, 1024, 19, 19}, OCN=1206, P=[1 x 1], BIAS, OCV/CPU_FP16)|-|31.925|-|
|conv::Conv::(GFLOPS=9.986, K=[3 x 3], IN={1, 512, 46, 46}, OCN=512, P=[1 x 1], BIAS, OCV/CPU)|28.677|27.813|1.03|
|conv::Conv::(GFLOPS=9.986, K=[3 x 3], IN={1, 512, 46, 46}, OCN=512, P=[1 x 1], BIAS, OCV/CPU_FP16)|-|27.808|-|
|conv::Conv::(GFLOPS=9.987, K=[3 x 3], IN={1, 256, 92, 92}, OCN=256, P=[1 x 1], BIAS, OCV/CPU)|31.274|27.892|1.12|
|conv::Conv::(GFLOPS=9.987, K=[3 x 3], IN={1, 256, 92, 92}, OCN=256, P=[1 x 1], BIAS, OCV/CPU_FP16)|-|27.910|-|
|conv::Conv::(GFLOPS=9.989, K=[3 x 3], IN={1, 128, 184, 184}, OCN=128, P=[1 x 1], BIAS, OCV/CPU)|30.533|30.007|1.02|
|conv::Conv::(GFLOPS=9.989, K=[3 x 3], IN={1, 128, 184, 184}, OCN=128, P=[1 x 1], BIAS, OCV/CPU_FP16)|-|30.089|-|
|conv::Conv::(GFLOPS=9.993, K=[3 x 3], IN={1, 64, 368, 368}, OCN=64, P=[1 x 1], BIAS, OCV/CPU)|39.837|38.312|1.04|
|conv::Conv::(GFLOPS=9.993, K=[3 x 3], IN={1, 64, 368, 368}, OCN=64, P=[1 x 1], BIAS, OCV/CPU_FP16)|-|38.477|-|
|conv::Conv::(GFLOPS=10.087, K=[3 x 3], IN={1, 576, 38, 50}, OCN=512, PM=SAME, BIAS, OCV/CPU)|32.480|29.237|1.11|
|conv::Conv::(GFLOPS=10.087, K=[3 x 3], IN={1, 576, 38, 50}, OCN=512, PM=SAME, BIAS, OCV/CPU_FP16)|-|29.452|-|
|conv::Conv::(GFLOPS=10.701, K=[3 x 3], IN={1, 512, 38, 38}, OCN=804, P=[1 x 1], BIAS, OCV/CPU)|33.544|32.832|1.02|
|conv::Conv::(GFLOPS=10.701, K=[3 x 3], IN={1, 512, 38, 38}, OCN=804, P=[1 x 1], BIAS, OCV/CPU_FP16)|-|32.784|-|
|conv::Conv::(GFLOPS=11.797, K=[5 x 5], IN={1, 240, 64, 64}, OCN=240, PM=SAME, OCV/CPU)|134.481|130.678|1.03|
|conv::Conv::(GFLOPS=11.797, K=[5 x 5], IN={1, 240, 64, 64}, OCN=240, PM=SAME, OCV/CPU_FP16)|-|70.134|-|
|conv::Conv::(GFLOPS=11.797, K=[5 x 5], IN={1, 480, 32, 32}, OCN=480, PM=SAME, OCV/CPU)|127.930|126.530|1.01|
|conv::Conv::(GFLOPS=11.797, K=[5 x 5], IN={1, 480, 32, 32}, OCN=480, PM=SAME, OCV/CPU_FP16)|-|65.261|-|
|conv::Conv::(GFLOPS=16.987, K=[5 x 5], IN={1, 1152, 16, 16}, OCN=1152, PM=SAME, OCV/CPU)|201.346|187.007|1.08|
|conv::Conv::(GFLOPS=16.987, K=[5 x 5], IN={1, 1152, 16, 16}, OCN=1152, PM=SAME, OCV/CPU_FP16)|-|91.525|-|
|conv::Conv::(GFLOPS=23.122, K=[5 x 5], IN={1, 672, 32, 32}, OCN=672, PM=SAME, OCV/CPU)|252.038|245.587|1.03|
|conv::Conv::(GFLOPS=23.122, K=[5 x 5], IN={1, 672, 32, 32}, OCN=672, PM=SAME, OCV/CPU_FP16)|-|125.477|-|
### Pull Request Readiness Checklist
See details at https://github.com/opencv/opencv/wiki/How_to_contribute#making-a-good-pull-request
- [x] I agree to contribute to the project under Apache 2 License.
- [x] To the best of my knowledge, the proposed patch is not based on a code under GPL or another license that is incompatible with OpenCV
- [x] The PR is proposed to the proper branch
- [ ] There is a reference to the original bug report and related work
- [ ] There is accuracy test, performance test and test data in opencv_extra repository, if applicable
Patch to opencv_extra has the same branch name.
- [ ] The feature is well documented and sample code can be built with the project CMake
2023-05-17 14:38:33 +08:00
if ( target = = DNN_TARGET_OPENCL_FP16 | | target = = DNN_TARGET_MYRIAD | | target = = DNN_TARGET_CPU_FP16 )
2019-12-20 21:36:32 +08:00
{
2021-11-26 03:56:27 +08:00
l1 = 0.02 ;
lInf = 0.2 ;
2019-12-20 21:36:32 +08:00
}
else if ( target = = DNN_TARGET_CUDA_FP16 )
{
l1 = 0.018 ;
lInf = 0.16 ;
}
2020-07-16 06:52:08 +08:00
# if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2020040000)
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH & & target = = DNN_TARGET_OPENCL_FP16 )
{
l1 = 0.018f ; lInf = 0.16f ;
}
# endif
2023-11-20 18:45:37 +08:00
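// trailing args: useSoftmax=false, checkNoFallbacks=true, numInps=1, testShapes=true, useWinograd=false
// (Winograd convolution is disabled for this model)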
testONNXModels ( " tiny_yolo2 " , pb , l1 , lInf , false , true , 1 , true , false ) ;
2018-09-11 02:07:51 +08:00
}
TEST_P ( Test_ONNX_nets , CNN_MNIST )
{
2019-03-29 21:42:58 +08:00
// output range: [-1952; 6574], after Softmax [0; 1]
testONNXModels ( " cnn_mnist " , pb , default_l1 , default_lInf , true ) ;
2018-09-11 02:07:51 +08:00
}
TEST_P ( Test_ONNX_nets , MobileNet_v2 )
{
2019-03-29 21:42:58 +08:00
// output range: [-166; 317], after Softmax [0; 1]
testONNXModels ( " mobilenetv2 " , pb , default_l1 , default_lInf , true ) ;
2018-09-11 02:07:51 +08:00
}
2022-08-26 10:04:44 +08:00
TEST_P ( Test_ONNX_nets , MobileNet_v2_FP16 )
{
testONNXModels ( " mobilenetv2_fp16 " , npy , default_l1 , default_lInf , true ) ;
}
2018-09-11 02:07:51 +08:00
TEST_P ( Test_ONNX_nets , LResNet100E_IR )
{
2019-05-27 20:14:18 +08:00
applyTestTag (
2020-02-25 02:18:33 +08:00
# if defined(OPENCV_32BIT_CONFIGURATION) && defined(HAVE_OPENCL)
CV_TEST_TAG_MEMORY_2GB ,
# else
2019-05-27 20:14:18 +08:00
( target = = DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_512MB : CV_TEST_TAG_MEMORY_1GB ) ,
2020-02-25 02:18:33 +08:00
# endif
2023-12-25 12:57:02 +08:00
CV_TEST_TAG_DEBUG_VERYLONG
2019-05-27 20:14:18 +08:00
) ;
2019-12-02 21:16:06 +08:00
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 )
2019-06-15 20:17:25 +08:00
{
2019-12-02 21:16:06 +08:00
if ( target = = DNN_TARGET_OPENCL_FP16 ) applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16 , CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER ) ;
if ( target = = DNN_TARGET_OPENCL ) applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_OPENCL , CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER ) ;
if ( target = = DNN_TARGET_MYRIAD ) applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_MYRIAD , CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER ) ;
2019-06-15 20:17:25 +08:00
}
2019-12-24 18:34:33 +08:00
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH )
{
if ( target = = DNN_TARGET_OPENCL_FP16 ) applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16 , CV_TEST_TAG_DNN_SKIP_IE_NGRAPH ) ;
if ( target = = DNN_TARGET_OPENCL ) applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_OPENCL , CV_TEST_TAG_DNN_SKIP_IE_NGRAPH ) ;
if ( target = = DNN_TARGET_MYRIAD ) applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_MYRIAD , CV_TEST_TAG_DNN_SKIP_IE_NGRAPH ) ;
}
2018-09-11 02:07:51 +08:00
2019-12-20 21:36:32 +08:00
double l1 = default_l1 , lInf = default_lInf ;
2018-09-11 02:07:51 +08:00
// output range: [-3; 3]
2023-11-20 18:45:37 +08:00
bool useWinograd = true ;
2019-12-20 21:36:32 +08:00
if ( backend = = DNN_BACKEND_OPENCV & & target = = DNN_TARGET_OPENCL_FP16 )
{
2018-09-11 02:07:51 +08:00
l1 = 0.009 ;
lInf = 0.035 ;
}
2019-12-20 21:36:32 +08:00
else if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 & & target = = DNN_TARGET_CPU )
{
2019-04-08 16:29:10 +08:00
l1 = 4.6e-5 ;
2019-01-14 14:55:44 +08:00
lInf = 1.9e-4 ;
}
2019-12-20 21:36:32 +08:00
else if ( target = = DNN_TARGET_CUDA_FP16 )
{
2022-10-18 04:00:12 +08:00
l1 = 0.009 ;
2019-12-20 21:36:32 +08:00
lInf = 0.04 ;
}
2023-11-20 18:45:37 +08:00
else if ( target = = DNN_TARGET_CPU_FP16 )
{
useWinograd = false ;
l1 = 0.009 ;
lInf = 0.035 ;
}
testONNXModels ( " LResNet100E_IR " , pb , l1 , lInf , false , true , 1 , true , useWinograd ) ;
2018-09-11 02:07:51 +08:00
}
TEST_P ( Test_ONNX_nets , Emotion_ferplus )
{
2019-03-29 21:42:58 +08:00
# if defined(INF_ENGINE_RELEASE)
2019-12-24 18:34:33 +08:00
if ( target = = DNN_TARGET_MYRIAD & & getInferenceEngineVPUType ( ) = = CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X )
applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X ,
backend = = DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 ?
CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER :
CV_TEST_TAG_DNN_SKIP_IE_NGRAPH ) ;
2019-03-29 21:42:58 +08:00
# endif
2018-12-20 18:14:47 +08:00
double l1 = default_l1 ;
double lInf = default_lInf ;
2023-11-20 18:45:37 +08:00
bool useWinograd = true ;
2019-03-29 21:42:58 +08:00
// Output values are in range [-2.011, 2.111]
2022-04-20 00:40:25 +08:00
if ( ( backend = = DNN_BACKEND_OPENCV & & target = = DNN_TARGET_OPENCL_FP16 ) | | ( target = = DNN_TARGET_CUDA_FP16 ) )
2018-12-20 18:14:47 +08:00
l1 = 0.007 ;
2019-12-02 21:16:06 +08:00
else if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 & & target = = DNN_TARGET_OPENCL_FP16 )
2018-12-20 18:14:47 +08:00
{
l1 = 0.021 ;
lInf = 0.034 ;
}
2019-12-02 21:16:06 +08:00
else if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 & & ( target = = DNN_TARGET_CPU | | target = = DNN_TARGET_OPENCL ) ) {
2019-01-14 14:55:44 +08:00
l1 = 2.4e-4 ;
lInf = 6e-4 ;
}
2023-11-20 18:45:37 +08:00
else if ( backend = = DNN_BACKEND_OPENCV & & target = = DNN_TARGET_CPU_FP16 )
{
useWinograd = false ;
l1 = 0.007 ;
}
2021-11-26 03:56:27 +08:00
# if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2020040000)
2020-07-16 06:52:08 +08:00
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH & & target = = DNN_TARGET_OPENCL_FP16 )
{
2021-11-26 03:56:27 +08:00
l1 = 0.013f ; lInf = 0.035f ;
2020-07-16 06:52:08 +08:00
}
# endif
2023-11-20 18:45:37 +08:00
testONNXModels ( " emotion_ferplus " , pb , l1 , lInf , false , true , 1 , true , useWinograd ) ;
2018-09-11 02:07:51 +08:00
}
TEST_P ( Test_ONNX_nets , Inception_v2 )
{
2019-03-29 21:42:58 +08:00
testONNXModels ( " inception_v2 " , pb , default_l1 , default_lInf , true ) ;
2018-09-11 02:07:51 +08:00
}
TEST_P ( Test_ONNX_nets , DenseNet121 )
{
2018-10-09 06:38:06 +08:00
applyTestTag ( CV_TEST_TAG_MEMORY_512MB ) ;
2019-03-29 21:42:58 +08:00
// output range: [-87; 138], after Softmax [0; 1]
2019-05-30 22:36:00 +08:00
testONNXModels ( " densenet121 " , pb , default_l1 , default_lInf , true , target ! = DNN_TARGET_MYRIAD ) ;
2018-09-11 02:07:51 +08:00
}
2018-09-18 01:26:17 +08:00
TEST_P ( Test_ONNX_nets , Inception_v1 )
{
2021-11-30 20:08:35 +08:00
# if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2021040000)
2019-12-24 18:34:33 +08:00
if ( ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 | |
backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH ) & & target = = DNN_TARGET_MYRIAD )
2019-06-15 20:17:25 +08:00
applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_MYRIAD ) ;
2018-12-20 18:14:47 +08:00
# endif
2018-09-18 01:26:17 +08:00
testONNXModels ( " inception_v1 " , pb ) ;
}
2018-09-11 02:07:51 +08:00
2018-10-09 03:18:41 +08:00
TEST_P ( Test_ONNX_nets , Shufflenet )
{
2021-11-30 20:08:35 +08:00
# if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2021040000)
2019-12-02 21:16:06 +08:00
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 )
2019-06-15 20:17:25 +08:00
{
2019-12-02 21:16:06 +08:00
if ( target = = DNN_TARGET_OPENCL_FP16 ) applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16 , CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER ) ;
if ( target = = DNN_TARGET_OPENCL ) applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_OPENCL , CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER ) ;
if ( target = = DNN_TARGET_MYRIAD ) applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_MYRIAD , CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER ) ;
2019-06-15 20:17:25 +08:00
}
2021-11-30 20:08:35 +08:00
# endif
2018-10-09 03:18:41 +08:00
testONNXModels ( " shufflenet " , pb ) ;
}
2019-05-30 22:36:00 +08:00
TEST_P ( Test_ONNX_nets , Resnet34_kinetics )
{
2022-08-02 14:58:05 +08:00
applyTestTag ( CV_TEST_TAG_DEBUG_VERYLONG ) ;
2022-03-31 03:03:38 +08:00
# if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2022010000)
// IE exception: Failed to allocate graph: MYRIAD device is not opened
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH & & target = = DNN_TARGET_MYRIAD )
applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_MYRIAD , CV_TEST_TAG_DNN_SKIP_IE_NGRAPH , CV_TEST_TAG_DNN_SKIP_IE_VERSION ) ;
// accuracy
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH & & ( target = = DNN_TARGET_OPENCL | | target = = DNN_TARGET_OPENCL_FP16 ) )
applyTestTag ( target = = DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_IE_OPENCL : CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16 ,
CV_TEST_TAG_DNN_SKIP_IE_NGRAPH , CV_TEST_TAG_DNN_SKIP_IE_VERSION
) ;
# elif defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2021040000)
2021-11-30 20:08:35 +08:00
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH )
{
// IE exception: Function contains several inputs and outputs with one friendly name!
if ( target = = DNN_TARGET_MYRIAD )
applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_MYRIAD , CV_TEST_TAG_DNN_SKIP_IE_NGRAPH , CV_TEST_TAG_DNN_SKIP_IE_VERSION ) ;
}
2022-03-31 03:03:38 +08:00
# elif defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2021040000)
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 & & target ! = DNN_TARGET_CPU )
applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER ) ; // Only CPU on DLIE backend is supported
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH & & target ! = DNN_TARGET_CPU )
applyTestTag ( CV_TEST_TAG_DNN_SKIP_IE_NGRAPH ) ; // Only CPU on DLIE backend is supported
2021-11-30 20:08:35 +08:00
# endif
if ( backend = = DNN_BACKEND_OPENCV & & target ! = DNN_TARGET_CPU )
throw SkipTestException ( " Only CPU is supported " ) ; // FIXIT use tags
2019-05-30 22:36:00 +08:00
2021-12-17 21:28:34 +08:00
if ( backend = = DNN_BACKEND_VKCOM )
applyTestTag ( CV_TEST_TAG_DNN_SKIP_VULKAN ) ;
2019-07-16 15:53:50 +08:00
String onnxmodel = findDataFile ( " dnn/resnet-34_kinetics.onnx " , false ) ;
2019-05-30 22:36:00 +08:00
Mat image0 = imread ( findDataFile ( " dnn/dog416.png " ) ) ;
Mat image1 = imread ( findDataFile ( " dnn/street.png " ) ) ;
Mat ref0 = blobFromNPY ( _tf ( " data/output_kinetics0.npy " ) ) ;
Mat ref1 = blobFromNPY ( _tf ( " data/output_kinetics1.npy " ) ) ;
std : : vector < Mat > images_0 ( 16 , image0 ) ;
std : : vector < Mat > images_1 ( 16 , image1 ) ;
Mat blob0 = blobFromImages ( images_0 , 1.0 , Size ( 112 , 112 ) , Scalar ( 114.7748 , 107.7354 , 99.4750 ) , true , true ) ;
Mat blob1 = blobFromImages ( images_1 , 1.0 , Size ( 112 , 112 ) , Scalar ( 114.7748 , 107.7354 , 99.4750 ) , true , true ) ;
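// blobFromImages produces a 4-D [16, 3, 112, 112] batch of frames; the small Permute net below
// swaps the batch and channel axes to [3, 16, 112, 112], and the subsequent reshape adds a leading
// dimension to obtain the 5-D [1, 3, 16, 112, 112] clip expected by the 3-D ResNet-34 Kinetics model.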
Net permute ;
LayerParams lp ;
int order [ ] = { 1 , 0 , 2 , 3 } ;
lp . set ( " order " , DictValue : : arrayInt < int * > ( & order [ 0 ] , 4 ) ) ;
permute . addLayerToPrev ( " perm " , " Permute " , lp ) ;
2019-12-02 21:16:06 +08:00
permute . setPreferableBackend ( backend ) ;
permute . setPreferableTarget ( target ) ;
2019-05-30 22:36:00 +08:00
permute . setInput ( blob0 ) ;
Mat input0 = permute . forward ( ) . clone ( ) ;
permute . setInput ( blob1 ) ;
Mat input1 = permute . forward ( ) . clone ( ) ;
int dims [ ] = { 1 , 3 , 16 , 112 , 112 } ;
input0 = input0 . reshape ( 0 , 5 , & dims [ 0 ] ) ;
input1 = input1 . reshape ( 0 , 5 , & dims [ 0 ] ) ;
Net net = readNetFromONNX ( onnxmodel ) ;
ASSERT_FALSE ( net . empty ( ) ) ;
net . setPreferableBackend ( backend ) ;
net . setPreferableTarget ( target ) ;
// output range [-5, 11]
float l1 = 0.0013 ;
float lInf = 0.009 ;
2021-11-30 20:08:35 +08:00
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH & & target = = DNN_TARGET_OPENCL_FP16 )
{
l1 = 0.02 ;
lInf = 0.07 ;
}
2019-12-20 21:36:32 +08:00
if ( target = = DNN_TARGET_CUDA_FP16 )
{
2020-11-21 20:05:20 +08:00
l1 = 0.01 ;
lInf = 0.06 ;
2019-12-20 21:36:32 +08:00
}
2019-05-30 22:36:00 +08:00
2023-03-21 19:50:53 +08:00
testInputShapes ( net , { input0 } ) ;
2019-05-30 22:36:00 +08:00
checkBackend ( & input0 , & ref0 ) ;
net . setInput ( input0 ) ;
Mat out = net . forward ( ) . clone ( ) ;
normAssert ( ref0 , out , " " , l1 , lInf ) ;
checkBackend ( & input1 , & ref1 ) ;
net . setInput ( input1 ) ;
out = net . forward ( ) . clone ( ) ;
normAssert ( ref1 , out , " " , l1 , lInf ) ;
expectNoFallbacksFromIE ( net ) ;
}
2021-08-18 01:09:25 +08:00
TEST_P ( Test_ONNX_layers , CumSum )
{
testONNXModels ( " cumsum_1d_exclusive_1 " ) ;
testONNXModels ( " cumsum_1d_reverse " ) ;
testONNXModels ( " cumsum_1d_exclusive_1_reverse " ) ;
testONNXModels ( " cumsum_2d_dim_1 " ) ;
testONNXModels ( " cumsum_3d_dim_2 " ) ;
}
2023-11-24 15:36:06 +08:00
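// Shared helper for the YOLO-family tests: reads dog_orig_size.png, builds the input blob with the
// given Image2BlobParams, runs the model, decodes the outputs with yoloPostProcessing() and compares
// the kept detections against the reference class ids, scores and boxes via normAssertDetections().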
static void testYOLO ( const std : : string & weightPath , const std : : vector < int > & refClassIds ,
2023-12-11 21:42:51 +08:00
const std : : vector < float > & refScores , const std : : vector < Rect2d > & refBoxes ,
Image2BlobParams imgParams , float conf_threshold = 0.3 , float iou_threshold = 0.5 ,
double scores_diff = 1e-5 , double boxes_iou_diff = 1e-4 , const std : : string test_name = " " )
2022-09-19 18:38:03 +08:00
{
std : : string imgPath = _tf ( " ../dog_orig_size.png " ) ;
Mat img = imread ( imgPath ) ;
2023-12-11 21:42:51 +08:00
Mat inp = blobFromImageWithParams ( img , imgParams ) ;
2022-09-19 18:38:03 +08:00
Net net = readNet ( weightPath ) ;
net . setInput ( inp ) ;
std : : vector < Mat > outs ;
net . forward ( outs , net . getUnconnectedOutLayersNames ( ) ) ;
2023-12-11 21:42:51 +08:00
// Decode the raw outputs into the final (post-NMS) detections
std : : vector < int > keep_classIds ;
std : : vector < float > keep_confidences ;
std : : vector < Rect2d > keep_boxes ;
yoloPostProcessing ( outs , keep_classIds , keep_confidences , keep_boxes , conf_threshold , iou_threshold , test_name ) ;
normAssertDetections (
refClassIds , refScores , refBoxes ,
keep_classIds , keep_confidences , keep_boxes ,
" " , 0.0 , scores_diff , boxes_iou_diff ) ;
}
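// Decodes raw YOLO outputs into class ids, confidences and [x1, y1, x2, y2] boxes:
// transposes anchor-free heads to [1, #anchors, 4+nc], concatenates the YOLO-NAS box and score
// outputs, filters candidates by (objectness x class) confidence, converts center/size boxes to
// corner format where needed, and finally applies NMS via NMSBoxes().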
void yoloPostProcessing (
std : : vector < Mat > & outs ,
std : : vector < int > & keep_classIds ,
std : : vector < float > & keep_confidences ,
std : : vector < Rect2d > & keep_boxes ,
float conf_threshold ,
float iou_threshold ,
2024-07-02 23:26:34 +08:00
const std : : string & model_name ,
const int nc
2023-12-11 21:42:51 +08:00
) {
2022-09-19 18:38:03 +08:00
// Candidate detections collected before NMS
std : : vector < int > classIds ;
std : : vector < float > confidences ;
std : : vector < Rect2d > boxes ;
2023-12-11 21:42:51 +08:00
2024-07-02 23:26:34 +08:00
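// YOLOv8/v9/v10 export their head as [1, 4+nc, #anchors]; transpose to [1, #anchors, 4+nc]
// so that each row holds one candidate box.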
if ( model_name = = " yolov8 " | | model_name = = " yolov10 " | |
model_name = = " yolov9 " )
{
2023-12-11 21:42:51 +08:00
cv : : transposeND ( outs [ 0 ] , { 0 , 2 , 1 } , outs [ 0 ] ) ;
}
2024-07-02 23:26:34 +08:00
if ( model_name = = " yolonas " ) {
2024-01-03 17:42:10 +08:00
// outs contains 2 elements of shape [1, 8400, 80] and [1, 8400, 4]. Concat them to get [1, 8400, 84]
Mat concat_out ;
// squeeze the first dimension
outs [ 0 ] = outs [ 0 ] . reshape ( 1 , outs [ 0 ] . size [ 1 ] ) ;
outs [ 1 ] = outs [ 1 ] . reshape ( 1 , outs [ 1 ] . size [ 1 ] ) ;
cv : : hconcat ( outs [ 1 ] , outs [ 0 ] , concat_out ) ;
outs [ 0 ] = concat_out ;
// remove the second element
outs . pop_back ( ) ;
// unsqueeze the first dimension
outs [ 0 ] = outs [ 0 ] . reshape ( 0 , std : : vector < int > { 1 , 8400 , 84 } ) ;
}
2024-07-02 23:26:34 +08:00
// sanity-check the output layout: the last dimension must be nc+5 (box + objectness + class scores)
// or 84 (box + class scores for anchor-free heads)
CV_CheckEQ ( outs [ 0 ] . dims , 3 , " Invalid output shape. The shape should be [1, #anchors, 85 or 84] " ) ;
CV_CheckEQ ( ( outs [ 0 ] . size [ 2 ] = = nc + 5 | | outs [ 0 ] . size [ 2 ] = = 80 + 4 ) , true , " Invalid output shape: " ) ;
2023-12-11 21:42:51 +08:00
for ( auto preds : outs ) {
preds = preds . reshape ( 1 , preds . size [ 1 ] ) ; // [1, 8400, 85] -> [8400, 85]
2023-11-24 15:36:06 +08:00
for ( int i = 0 ; i < preds . rows ; + + i )
{
2024-01-03 17:42:10 +08:00
// filter out candidates with low objectness; anchor-free heads (YOLOv8/v9/v10, YOLO-NAS)
// have no objectness score, so use 1.0
2024-07-02 23:26:34 +08:00
float obj_conf = ( model_name = = " yolov8 " | | model_name = = " yolonas " | |
model_name = = " yolov9 " | | model_name = = " yolov10 " ) ? 1.0f : preds . at < float > ( i , 4 ) ;
2023-11-24 15:36:06 +08:00
if ( obj_conf < conf_threshold )
continue ;
2024-07-02 23:26:34 +08:00
Mat scores = preds . row ( i ) . colRange ( ( model_name = = " yolov8 " | | model_name = = " yolonas " | | model_name = = " yolov9 " | | model_name = = " yolov10 " ) ? 4 : 5 , preds . cols ) ;
2023-11-24 15:36:06 +08:00
double conf ;
Point maxLoc ;
minMaxLoc ( scores , 0 , & conf , 0 , & maxLoc ) ;
2023-12-11 21:42:51 +08:00
2024-07-02 23:26:34 +08:00
conf = ( model_name = = " yolov8 " | | model_name = = " yolonas " | | model_name = = " yolov9 " | | model_name = = " yolov10 " ) ? conf : conf * obj_conf ;
2023-11-24 15:36:06 +08:00
if ( conf < conf_threshold )
continue ;
// get bbox coords
float * det = preds . ptr < float > ( i ) ;
double cx = det [ 0 ] ;
double cy = det [ 1 ] ;
double w = det [ 2 ] ;
double h = det [ 3 ] ;
2023-12-11 21:42:51 +08:00
2023-11-24 15:36:06 +08:00
// store boxes as [x1, y1, x2, y2]; YOLO-NAS and YOLOv10 already output corner coordinates,
// the other models output center/size
2024-07-02 23:26:34 +08:00
if ( model_name = = " yolonas " | | model_name = = " yolov10 " ) {
2024-01-03 17:42:10 +08:00
boxes . push_back ( Rect2d ( cx , cy , w , h ) ) ;
} else {
boxes . push_back ( Rect2d ( cx - 0.5 * w , cy - 0.5 * h ,
cx + 0.5 * w , cy + 0.5 * h ) ) ;
}
2024-07-02 23:26:34 +08:00
classIds . push_back ( maxLoc . x ) ;
2023-11-24 15:36:06 +08:00
confidences . push_back ( conf ) ;
}
2022-09-19 18:38:03 +08:00
}
// NMS
std : : vector < int > keep_idx ;
NMSBoxes ( boxes , confidences , conf_threshold , iou_threshold , keep_idx ) ;
for ( auto i : keep_idx )
{
keep_classIds . push_back ( classIds [ i ] ) ;
keep_confidences . push_back ( confidences [ i ] ) ;
keep_boxes . push_back ( boxes [ i ] ) ;
}
2023-12-11 21:42:51 +08:00
}
2024-07-02 23:26:34 +08:00
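// YOLOv10: the 640x480 input is letterboxed, and the anchor-free head already outputs
// corner-format [x1, y1, x2, y2] boxes, so yoloPostProcessing() keeps them as-is
// (same branch as YOLO-NAS).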
TEST_P ( Test_ONNX_nets , YOLOv10 )
{
std : : string weightPath = _tf ( " models/yolov10s.onnx " , false ) ;
Size targetSize { 640 , 480 } ;
float conf_threshold = 0.50 ;
float iou_threshold = 0.50 ;
std : : vector < int > refClassIds { 1 , 16 , 7 } ;
std : : vector < float > refScores { 0.9510f , 0.9454f , 0.8404f } ;
std : : vector < Rect2d > refBoxes {
Rect2d ( 105.5014 , 112.8838 , 472.9274 , 350.0603 ) ,
Rect2d ( 109.8231 , 185.7994 , 258.5916 , 452.9302 ) ,
Rect2d ( 388.5018 , 62.1034 , 576.6399 , 143.3986 )
} ;
Image2BlobParams imgParams (
Scalar : : all ( 1 / 255.0 ) ,
targetSize ,
Scalar : : all ( 0 ) ,
true ,
CV_32F ,
DNN_LAYOUT_NCHW ,
DNN_PMODE_LETTERBOX ,
Scalar : : all ( 114 )
) ;
testYOLO (
weightPath , refClassIds , refScores , refBoxes ,
imgParams , conf_threshold , iou_threshold ,
1.0e-4 , 1.0e-4 , " yolov10 " ) ;
}
TEST_P ( Test_ONNX_nets , YOLOv9 )
{
std : : string weightPath = _tf ( " models/yolov9t.onnx " , false ) ;
Size targetSize { 640 , 480 } ;
float conf_threshold = 0.50 ;
float iou_threshold = 0.50 ;
2023-12-11 21:42:51 +08:00
2024-07-02 23:26:34 +08:00
std : : vector < int > refClassIds { 1 , 16 , 2 } ; // wrong class mapping for yolov9
2024-07-03 17:10:31 +08:00
std : : vector < float > refScores { 0.959274f , 0.901125f , 0.559396f } ;
2024-07-02 23:26:34 +08:00
std : : vector < Rect2d > refBoxes {
Rect2d ( 106.255 , 107.927 , 472.497 , 350.309 ) ,
Rect2d ( 108.633 , 185.256 , 259.287 , 450.672 ) ,
Rect2d ( 390.701 , 62.1454 , 576.928 , 141.795 )
} ;
Image2BlobParams imgParams (
Scalar : : all ( 1 / 255.0 ) ,
targetSize ,
Scalar : : all ( 0 ) ,
true ,
CV_32F ,
DNN_LAYOUT_NCHW ,
DNN_PMODE_LETTERBOX ,
Scalar : : all ( 114 )
) ;
testYOLO (
weightPath , refClassIds , refScores , refBoxes ,
imgParams , conf_threshold , iou_threshold ,
1.0e-4 , 1.0e-4 , " yolov9 " ) ;
}
2023-12-11 21:42:51 +08:00
TEST_P ( Test_ONNX_nets , YOLOX )
{
2023-12-25 12:57:02 +08:00
applyTestTag ( CV_TEST_TAG_DEBUG_VERYLONG ) ;
2023-12-11 21:42:51 +08:00
std : : string weightPath = _tf ( " models/yolox_s_inf_decoder.onnx " , false ) ;
Size targetSize { 640 , 640 } ;
float conf_threshold = 0.50 ;
float iou_threshold = 0.50 ;
2022-09-19 18:38:03 +08:00
2023-12-11 21:42:51 +08:00
std : : vector < int > refClassIds { 1 , 16 , 7 } ;
std : : vector < float > refScores { 0.9649f , 0.9163f , 0.6879f } ;
std : : vector < Rect2d > refBoxes {
Rect2d ( 105.5384 , 179.4100 , 470.6339 , 428.5553 ) ,
Rect2d ( 111.4482 , 263.4098 , 258.7438 , 526.1140 ) ,
Rect2d ( 389.1421 , 143.9286 , 577.9495 , 222.0294 )
} ;
Image2BlobParams imgParams (
Scalar : : all ( 1 ) ,
targetSize ,
Scalar : : all ( 0 ) ,
true ,
CV_32F ,
DNN_LAYOUT_NCHW ,
DNN_PMODE_LETTERBOX ,
Scalar : : all ( 114 )
) ;
testYOLO (
weightPath , refClassIds , refScores , refBoxes ,
imgParams , conf_threshold , iou_threshold ,
1.0e-4 , 1.0e-4 ) ;
}
2024-01-03 17:42:10 +08:00
TEST_P ( Test_ONNX_nets , YOLONas )
{
// model information: https://dl.opencv.org/models/yolo-nas/Readme.md
std : : string weightPath = _tf ( " models/yolo_nas_s.onnx " , false ) ;
Size targetSize { 640 , 640 } ;
float conf_threshold = 0.50 ;
float iou_threshold = 0.50 ;
std : : vector < int > refClassIds { 1 , 16 , 7 } ;
std : : vector < float > refScores { 0.9720f , 0.9283f , 0.8990f } ;
// [x1, y1, x2, y2]
std : : vector < Rect2d > refBoxes {
Rect2d ( 105.516 , 173.696 , 471.323 , 430.433 ) ,
Rect2d ( 109.241 , 263.406 , 259.872 , 531.858 ) ,
Rect2d ( 390.153 , 142.492 , 574.932 , 222.709 )
} ;
Image2BlobParams imgParams (
Scalar : : all ( 1 / 255.0 ) ,
targetSize ,
Scalar : : all ( 0 ) ,
false ,
CV_32F ,
DNN_LAYOUT_NCHW ,
DNN_PMODE_LETTERBOX ,
Scalar : : all ( 114 )
) ;
testYOLO (
weightPath , refClassIds , refScores , refBoxes ,
imgParams , conf_threshold , iou_threshold ,
1.0e-4 , 1.0e-4 , " yolonas " ) ;
}
2023-12-11 21:42:51 +08:00
TEST_P ( Test_ONNX_nets , YOLOv8 )
{
std : : string weightPath = _tf ( " models/yolov8n.onnx " , false ) ;
Size targetSize { 640 , 640 } ;
float conf_threshold = 0.25 ;
float iou_threshold = 0.50 ;
std : : vector < int > refClassIds { 16 , 1 , 2 } ;
std : : vector < float > refScores { 0.9332f , 0.8959f , 0.6157f } ;
// [x1, y1, x2, y2]
std : : vector < Rect2d > refBoxes {
Rect2d ( 108.8965 , 261.9094 , 257.1633 , 530.3049 ) ,
Rect2d ( 110.4020 , 192.9843 , 473.4418 , 429.5965 ) ,
Rect2d ( 389.1603 , 143.2506 , 577.3542 , 223.0615 ) ,
} ;
Image2BlobParams imgParams (
Scalar : : all ( 1 / 255.0 ) ,
targetSize ,
Scalar : : all ( 0 ) ,
true ,
CV_32F ,
DNN_LAYOUT_NCHW ,
DNN_PMODE_LETTERBOX ,
Scalar : : all ( 114 )
) ;
testYOLO (
weightPath , refClassIds , refScores , refBoxes ,
imgParams , conf_threshold , iou_threshold ,
1.0e-4 , 1.0e-4 , " yolov8 " ) ;
2022-09-19 18:38:03 +08:00
}
2023-11-24 15:36:06 +08:00
// This test mainly covers:
// 1. Identity node with constant input
// 2. limited support for the Range operator (all inputs are constant)
// 3. parseExpand with multiple broadcast axes
// 4. the 1-D Mat dimension issue with the output of the Range operator
TEST_P ( Test_ONNX_nets , YOLOv7 )
{
applyTestTag (
CV_TEST_TAG_MEMORY_2GB ,
CV_TEST_TAG_DEBUG_VERYLONG
) ;
std : : string weightPath = _tf ( " models/yolov7.onnx " , false ) ;
// Reference outputs, collected with an input size of 640x640
std : : vector < int > refClassIds { 1 , 16 , 7 } ;
std : : vector < float > refScores { 0.9614331f , 0.9589417f , 0.8679074f } ;
// [x1, y1, x2, y2] x 3
std : : vector < Rect2d > refBoxes { Rect2d ( 105.973236f , 150.16716f , 472.59012f , 466.48834f ) ,
Rect2d ( 109.97953f , 246.17862f , 259.83676f , 600.76624f ) ,
Rect2d ( 385.96185f , 83.02809f , 576.07355f , 189.82793f ) } ;
Size targetSize { 640 , 640 } ;
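// DNN_PMODE_NULL: the image is simply resized to 640x640, with no letterbox padding.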
Image2BlobParams imgParams (
Scalar : : all ( 1 / 255.0 ) ,
targetSize ,
Scalar : : all ( 0 ) ,
true ,
CV_32F ,
DNN_LAYOUT_NCHW ,
DNN_PMODE_NULL ,
Scalar : : all ( 0 )
) ;
testYOLO ( weightPath , refClassIds , refScores , refBoxes , imgParams ) ;
}
TEST_P ( Test_ONNX_nets , YOLOv6 )
{
std : : string weightPath = _tf ( " models/yolov6n.onnx " , false ) ;
Size targetSize { 640 , 640 } ;
float conf_threshold = 0.30 ;
float iou_threshold = 0.50 ;
std : : vector < int > refClassIds { 1 , 16 , 7 , 1 } ;
std : : vector < float > refScores { 0.95031f , 0.87123f , 0.65453f , 0.34142f } ;
// [x1, y1, x2, y2] x 4
std : : vector < Rect2d > refBoxes { Rect2d ( 98.84 , 177.91 , 473.29 , 431.19 ) ,
Rect2d ( 109.80 , 265.50 , 258.86 , 531.97 ) ,
Rect2d ( 387.79 , 141.61 , 576.98 , 223.52 ) ,
Rect2d ( 105.62 , 199.24 , 218.37 , 389.84 ) ,
} ;
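// Note: conf_threshold is 0.30 here, low enough to keep the fourth,
// lower-scoring detection (0.34142).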
Image2BlobParams imgParams (
Scalar : : all ( 1 / 255.0 ) ,
targetSize ,
Scalar : : all ( 0 ) ,
true ,
CV_32F ,
DNN_LAYOUT_NCHW ,
DNN_PMODE_LETTERBOX ,
Scalar : : all ( 114 )
) ;
testYOLO (
weightPath , refClassIds , refScores , refBoxes ,
imgParams , conf_threshold , iou_threshold ,
1.0e-4 , 1.0e-3 ) ;
}
TEST_P ( Test_ONNX_nets , YOLOv5n )
{
std : : string weightPath = findDataFile ( " dnn/yolov5n.onnx " , false ) ;
// Reference outputs, collected with an input size of 640x640
std : : vector < int > refClassIds { 16 , 2 , 1 } ;
std : : vector < float > refScores { 0.749053f , 0.616853f , 0.32506f } ;
// [x1, y1, x2, y2] x 3
std : : vector < Rect2d > refBoxes { Rect2d ( 108.088f , 239.293f , 266.196f , 607.658f ) ,
Rect2d ( 392.028f , 89.9233f , 579.152f , 190.447f ) ,
Rect2d ( 120.278f , 159.76f , 214.481f , 241.473f ) } ;
Size targetSize { 640 , 640 } ;
Image2BlobParams imgParams (
Scalar : : all ( 1 / 255.0 ) ,
targetSize ,
Scalar : : all ( 0 ) ,
true ,
CV_32F ,
DNN_LAYOUT_NCHW ,
DNN_PMODE_NULL ,
Scalar : : all ( 0 )
) ;
testYOLO ( weightPath , refClassIds , refScores , refBoxes , imgParams ) ;
}
TEST_P ( Test_ONNX_layers , Tile )
{
testONNXModels ( " tile " , pb ) ;
}
TEST_P ( Test_ONNX_layers , Gelu )
{
testONNXModels ( " gelu " ) ;
testONNXModels ( " gelu_approximation " ) ;
}
TEST_P ( Test_ONNX_layers , OpenAI_CLIP_head )
{
testONNXModels ( " clip-vit-base-head " ) ;
}
TEST_P ( Test_ONNX_layers , where_node )
{
testONNXModels ( " where_layer " ) ;
}
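// The Gemm models below take two external inputs (matrices A and B), hence
// numInps = 2 in each call.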
TEST_P ( Test_ONNX_layers , Gemm_all_attributes ) {
testONNXModels ( " test_gemm_all_attributes " , pb , 0 , 0 , false , true , 2 ) ;
}
TEST_P ( Test_ONNX_layers , Gemm_alpha ) {
testONNXModels ( " test_gemm_alpha " , pb , 0 , 0 , false , true , 2 ) ;
}
TEST_P ( Test_ONNX_layers , Gemm_beta ) {
testONNXModels ( " test_gemm_beta " , pb , 0 , 0 , false , true , 2 ) ;
}
TEST_P ( Test_ONNX_layers , Gemm_default_matrix_bias ) {
testONNXModels ( " test_gemm_default_matrix_bias " , pb , 0 , 0 , false , true , 2 ) ;
}
TEST_P ( Test_ONNX_layers , Gemm_default_no_bias ) {
testONNXModels ( " test_gemm_default_no_bias " , pb , 0 , 0 , false , true , 2 ) ;
}
TEST_P ( Test_ONNX_layers , Gemm_default_scalar_bias ) {
testONNXModels ( " test_gemm_default_scalar_bias " , pb , 0 , 0 , false , true , 2 ) ;
}
TEST_P ( Test_ONNX_layers , Gemm_default_single_elem_vector_bias ) {
testONNXModels ( " test_gemm_default_single_elem_vector_bias " , pb , 0 , 0 , false , true , 2 ) ;
}
TEST_P ( Test_ONNX_layers , Gemm_default_vector_bias ) {
testONNXModels ( " test_gemm_default_vector_bias " , pb , 0 , 0 , false , true , 2 ) ;
}
TEST_P ( Test_ONNX_layers , Gemm_default_zero_bias ) {
testONNXModels ( " test_gemm_default_zero_bias " , pb , 0 , 0 , false , true , 2 ) ;
}
TEST_P ( Test_ONNX_layers , Gemm_transposeA ) {
testONNXModels ( " test_gemm_transposeA " , pb , 0 , 0 , false , true , 2 ) ;
}
TEST_P ( Test_ONNX_layers , Gemm_transposeB ) {
testONNXModels ( " test_gemm_transposeB " , pb , 0 , 0 , false , true , 2 ) ;
}
// Note: These tests are converted from onnx/onnx so that the shape input is a constant.
// TODO: They can be moved into conformance tests once dynamic input is properly supported.
TEST_P ( Test_ONNX_layers , Expand_dim_changed ) {
testONNXModels ( " test_expand_dim_changed " , pb , 0 , 0 , false , true , 1 ) ;
}
TEST_P ( Test_ONNX_layers , Expand_dim_unchanged ) {
testONNXModels ( " test_expand_dim_unchanged " , pb , 0 , 0 , false , true , 1 ) ;
}
TEST_P ( Test_ONNX_layers , Expand_shape_model1 ) {
testONNXModels ( " test_expand_shape_model1 " , pb , 0 , 0 , false , true , 1 ) ;
}
TEST_P ( Test_ONNX_layers , Expand_shape_model2 ) {
testONNXModels ( " test_expand_shape_model2 " , pb , 0 , 0 , false , true , 1 ) ;
}
TEST_P ( Test_ONNX_layers , Expand_shape_model3 ) {
testONNXModels ( " test_expand_shape_model3 " , pb , 0 , 0 , false , true , 1 ) ;
}
TEST_P ( Test_ONNX_layers , Expand_shape_model4 ) {
testONNXModels ( " test_expand_shape_model4 " , pb , 0 , 0 , false , true , 1 ) ;
}
TEST_P ( Test_ONNX_layers , Attention ) {
testONNXModels ( " attention " ) ;
}
TEST_P ( Test_ONNX_layers , AttentionSingleHead ) {
testONNXModels ( " attention_single_head " ) ;
}
TEST_P ( Test_ONNX_layers , PyTorchAttentionSingleHead ) {
testONNXModels ( " pytorch_attention_single_head " ) ;
}
TEST_P ( Test_ONNX_layers , PyTorchUnflatten ) {
testONNXModels ( " unflatten " ) ;
}
TEST_P ( Test_ONNX_nets , ViT_B_32 ) {
applyTestTag ( CV_TEST_TAG_LONG , CV_TEST_TAG_DEBUG_LONG ) ;
const std : : string model_path = _tf ( " models/vit_b_32.onnx " , false ) ;
auto net = readNet ( model_path ) ;
ASSERT_FALSE ( net . empty ( ) ) ;
net . setPreferableBackend ( backend ) ;
net . setPreferableTarget ( target ) ;
auto image = imread ( _tf ( " ../googlenet_0.png " ) ) ;
auto blob = blobFromImage ( image , 1.f , Size ( 224 , 224 ) ) ;
auto ref = blobFromNPY ( _tf ( " data/output_vit_b_32.npy " ) ) ;
checkBackend ( & blob , & ref ) ;
net . setInput ( blob ) ;
auto out = net . forward ( ) ;
double l1 = default_l1 ;
double lInf = default_lInf ;
if ( target = = DNN_TARGET_CUDA_FP16 )
{
l1 = 0.01 ;
lInf = 0.06 ;
}
if ( target = = DNN_TARGET_OPENCL_FP16 )
{
l1 = 0.008 ;
lInf = 0.04 ;
}
if ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH ) {
if ( target = = DNN_TARGET_CPU ) {
l1 = 4.4e-5 ; // Expected: (normL1) <= (l1), actual: 4.31208e-05 vs 1e-05
lInf = 0.0002 ; // Expected: (normInf) <= (lInf), actual: 0.000194907 vs 0.0001
} else if ( target = = DNN_TARGET_OPENCL | | target = = DNN_TARGET_OPENCL_FP16 ) {
l1 = 0.0092 ; // Expected: (normL1) <= (l1), actual: 0.00918349 vs 4.4e-05
lInf = 0.056 ; // Expected: (normInf) <= (lInf), actual: 0.0556431 vs 0.0002
}
}
normAssert ( ref , out , " ViTB_32 " , l1 , lInf ) ;
}
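// VitTrack takes two named inputs: a 128x128 "template" crop and a 256x256
// "search" region; the same image is used for both here.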
TEST_P ( Test_ONNX_nets , VitTrack ) {
auto image = imread ( _tf ( " ../dog_orig_size.png " ) ) ;
auto input0 = blobFromImage ( image , 1.f , Size ( 128 , 128 ) ) ;
auto input1 = blobFromImage ( image , 1.f , Size ( 256 , 256 ) ) ;
auto net = readNet ( _tf ( " models/object_tracking_vittrack_2023sep.onnx " , false ) ) ;
net . setInput ( input0 , " template " ) ;
net . setInput ( input1 , " search " ) ;
std : : vector < std : : string > output_names { " output1 " , " output2 " , " output3 " } ;
std : : vector < Mat > outputs ;
net . forward ( outputs , output_names ) ;
auto ref_output1 = blobFromNPY ( _tf ( " data/output_object_tracking_vittrack_2023sep_0.npy " ) ) ;
auto ref_output2 = blobFromNPY ( _tf ( " data/output_object_tracking_vittrack_2023sep_1.npy " ) ) ;
auto ref_output3 = blobFromNPY ( _tf ( " data/output_object_tracking_vittrack_2023sep_2.npy " ) ) ;
normAssert ( ref_output1 , outputs [ 0 ] , " VitTrack output1 " ) ;
normAssert ( ref_output2 , outputs [ 1 ] , " VitTrack output2 " ) ;
normAssert ( ref_output3 , outputs [ 2 ] , " VitTrack output3 " ) ;
}
TEST_P ( Test_ONNX_layers , LayerNormNoFusion ) {
testONNXModels ( " layer_norm_no_fusion " ) ;
}
TEST_P ( Test_ONNX_layers , MatMulAddFusion ) {
double l1 = ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH & & target = = DNN_TARGET_OPENCL ) ? 0.0018 : default_l1 ;
double lInf = ( backend = = DNN_BACKEND_INFERENCE_ENGINE_NGRAPH & & target = = DNN_TARGET_OPENCL ) ? 0.011 : default_lInf ;
testONNXModels ( " biased_matmul " , npy , l1 , lInf ) ;
}
TEST_P ( Test_ONNX_layers , ClipDivSharedConstant ) {
testONNXModels ( " clip_div_shared_constant " ) ;
}
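// ONNX TopK has two outputs (values and indices), which backends such as
// OpenVINO do not support; the indices output is converted to CV_32S before
// comparison (see the TODO below).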
TEST_P ( Test_ONNX_layers , TopK ) {
auto test = [ & ] ( const std : : string & basename , double l1 = 0 , double lInf = 0 ) {
std : : string onnxmodel = _tf ( " models/ " + basename + " .onnx " , true ) ;
Mat input = readTensorFromONNX ( _tf ( " data/input_ " + basename + " .pb " ) ) ;
Mat output_ref_val = readTensorFromONNX ( _tf ( " data/output_ " + basename + " _0.pb " ) ) ,
output_ref_ind = readTensorFromONNX ( _tf ( " data/output_ " + basename + " _1.pb " ) ) ;
checkBackend ( & input , & output_ref_val ) ;
checkBackend ( & input , & output_ref_ind ) ;
Net net = readNetFromONNX ( onnxmodel ) ;
net . setPreferableBackend ( backend ) ;
net . setPreferableTarget ( target ) ;
net . setInput ( input ) ;
std : : vector < Mat > outputs ;
net . forward ( outputs , std : : vector < std : : string > { " values " , " indices " } ) ;
Mat output_res_val = outputs . front ( ) ,
output_res_ind = outputs . back ( ) ;
output_res_ind . convertTo ( output_res_ind , CV_32S ) ; // TODO: remove this conversion on 5.x
normAssert ( output_ref_val , output_res_val , ( basename + " values " ) . c_str ( ) , l1 ? l1 : default_l1 , lInf ? lInf : default_lInf ) ;
normAssert ( output_ref_ind , output_res_ind , ( basename + " indices " ) . c_str ( ) , l1 ? l1 : default_l1 , lInf ? lInf : default_lInf ) ;
expectNoFallbacksFromIE ( net ) ;
} ;
test ( " top_k " ) ;
test ( " top_k_negative_axis " ) ;
test ( " top_k_smallest " ) ;
}
INSTANTIATE_TEST_CASE_P ( /**/ , Test_ONNX_nets , dnnBackendsAndTargets ( ) ) ;
} } // namespace