Mirror of https://github.com/opencv/opencv.git (synced 2024-11-25 11:40:44 +08:00)
Make Inference Engine R3 the minimal supported version
This commit is contained in:
parent 1db5d82b7f
commit ed710eaa1c
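Throughout the diff, builds are gated on integer release identifiers of the form <year><release>0000, so newer releases compare greater than older ones. Below is a minimal sketch of that pattern; only the constants and the call sites appear in the hunks, the INF_ENGINE_VER_MAJOR_GE body shown here is a simplified assumption for illustration.

// Release identifiers: 2018R3 -> 2018030000, 2018R4 -> 2018040000, etc.
#define INF_ENGINE_RELEASE_2018R3 2018030000
#define INF_ENGINE_RELEASE_2018R4 2018040000

// Simplified stand-in for the helper used in the source; the real macro may
// compare at a coarser granularity.
#define INF_ENGINE_VER_MAJOR_GE(ver) (INF_ENGINE_RELEASE >= (ver))

// Typical guard this commit removes or collapses: code that only existed to
// support builds older than 2018R3.
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_RELEASE < 2018030000
// ... pre-R3 fallback path ...
#endif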
@@ -162,24 +162,6 @@ PERF_TEST_P_(DNNTestNetwork, DenseNet_121)
               Mat(cv::Size(224, 224), CV_32FC3));
}

PERF_TEST_P_(DNNTestNetwork, OpenPose_pose_coco)
{
    if (backend == DNN_BACKEND_HALIDE ||
        (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD))
        throw SkipTestException("");
    processNet("dnn/openpose_pose_coco.caffemodel", "dnn/openpose_pose_coco.prototxt", "",
               Mat(cv::Size(368, 368), CV_32FC3));
}

PERF_TEST_P_(DNNTestNetwork, OpenPose_pose_mpi)
{
    if (backend == DNN_BACKEND_HALIDE ||
        (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD))
        throw SkipTestException("");
    processNet("dnn/openpose_pose_mpi.caffemodel", "dnn/openpose_pose_mpi.prototxt", "",
               Mat(cv::Size(368, 368), CV_32FC3));
}

PERF_TEST_P_(DNNTestNetwork, OpenPose_pose_mpi_faster_4_stages)
{
    if (backend == DNN_BACKEND_HALIDE ||
@@ -219,11 +201,7 @@ PERF_TEST_P_(DNNTestNetwork, YOLOv3)

PERF_TEST_P_(DNNTestNetwork, EAST_text_detection)
{
    if (backend == DNN_BACKEND_HALIDE
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_RELEASE < 2018030000
        || (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
#endif
        )
    if (backend == DNN_BACKEND_HALIDE)
        throw SkipTestException("");
    processNet("dnn/frozen_east_text_detection.pb", "", "", Mat(cv::Size(320, 320), CV_32FC3));
}
@@ -1161,10 +1161,7 @@ public:
        const int group = numOutput / outGroupCn;
        if (group != 1)
        {
#if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2018R3)
            return preferableTarget == DNN_TARGET_CPU;
#endif
            return false;
        }
        if (preferableTarget == DNN_TARGET_OPENCL || preferableTarget == DNN_TARGET_OPENCL_FP16)
            return dilation.width == 1 && dilation.height == 1;
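Read plainly, the guard above means that when OpenCV is built against 2018R3 or newer, the grouped (group != 1) case of this layer is reported as supported by the Inference Engine backend only for the CPU target, while older builds rejected it outright. A minimal reduction of that branch follows, for illustration only; the function name and signature are invented here.

// Hypothetical reduction of the guarded branch above.
static bool groupedCaseSupported(int preferableTarget)
{
#if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2018R3)
    return preferableTarget == DNN_TARGET_CPU;   // built against R3+: CPU target only
#else
    return false;                                // pre-R3 build: never supported
#endif
}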
@@ -541,7 +541,6 @@ size_t InfEngineBackendNet::getBatchSize() const noexcept
    return batchSize;
}

#if INF_ENGINE_VER_MAJOR_GT(INF_ENGINE_RELEASE_2018R2)
InferenceEngine::StatusCode InfEngineBackendNet::AddExtension(const InferenceEngine::IShapeInferExtensionPtr &extension, InferenceEngine::ResponseDesc *resp) noexcept
{
    CV_Error(Error::StsNotImplemented, "");
@@ -553,7 +552,6 @@ InferenceEngine::StatusCode InfEngineBackendNet::reshape(const InferenceEngine::
    CV_Error(Error::StsNotImplemented, "");
    return InferenceEngine::StatusCode::OK;
}
#endif

void InfEngineBackendNet::init(int targetId)
{
@@ -22,8 +22,6 @@
//#pragma GCC diagnostic pop
#endif

#define INF_ENGINE_RELEASE_2018R1 2018010000
#define INF_ENGINE_RELEASE_2018R2 2018020000
#define INF_ENGINE_RELEASE_2018R3 2018030000
#define INF_ENGINE_RELEASE_2018R4 2018040000
#define INF_ENGINE_RELEASE_2018R5 2018050000
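The test changes below gate on these constants with plain integer comparisons rather than the helper macros. As a sketch of the idiom they repeat (the wrapper function here is invented; the real tests write the #if/throw inline):

// Hypothetical wrapper around the skip pattern repeated in the test hunks below.
static void skipMyriadIfOlderThan2018R3(int backend, int target)
{
    (void)backend; (void)target;  // unused when building against 2018R3 or newer
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_RELEASE < 2018030000
    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
        throw SkipTestException("Test is enabled starts from OpenVINO 2018R3");
#endif
}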
@@ -250,7 +250,7 @@ TEST_P(DNNTestNetwork, OpenPose_pose_mpi_faster_4_stages)
TEST_P(DNNTestNetwork, OpenFace)
{
#if defined(INF_ENGINE_RELEASE)
#if (INF_ENGINE_RELEASE < 2018030000 || INF_ENGINE_RELEASE == 2018050000)
#if INF_ENGINE_RELEASE == 2018050000
    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
        throw SkipTestException("");
#elif INF_ENGINE_RELEASE < 2018040000
@@ -285,29 +285,9 @@ public:
    {
        if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
        {
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_RELEASE < 2018030000
            if (inp && ref && inp->size[0] != 1)
            {
                // Myriad plugin supports only batch size 1. Slice a single sample.
                if (inp->size[0] == ref->size[0])
                {
                    std::vector<cv::Range> range(inp->dims, Range::all());
                    range[0] = Range(0, 1);
                    *inp = inp->operator()(range);

                    range = std::vector<cv::Range>(ref->dims, Range::all());
                    range[0] = Range(0, 1);
                    *ref = ref->operator()(range);
                }
                else
                    throw SkipTestException("Myriad plugin supports only batch size 1");
            }
#else
            if (inp && ref && inp->dims == 4 && ref->dims == 4 &&
                inp->size[0] != 1 && inp->size[0] != ref->size[0])
                throw SkipTestException("Inconsistent batch size of input and output blobs for Myriad plugin");

#endif
        }
    }

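The pre-R3 branch above trims a batched blob down to its first sample using ranged Mat indexing. A standalone sketch of that slicing idiom follows; the blob shape used here is just an example.

#include <opencv2/core.hpp>
#include <vector>

int main()
{
    // Example 4D blob in NCHW layout: batch of 8 three-channel 32x32 samples.
    int dims[] = {8, 3, 32, 32};
    cv::Mat blob(4, dims, CV_32F, cv::Scalar(0));

    // Keep every dimension except the batch one, which is cut to [0, 1).
    std::vector<cv::Range> range(blob.dims, cv::Range::all());
    range[0] = cv::Range(0, 1);
    cv::Mat firstSample = blob(range);   // header-only view, no data copy

    CV_Assert(firstSample.size[0] == 1); // batch dimension is now 1
    return 0;
}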
@@ -93,11 +93,6 @@ TEST_P(Convolution, Accuracy)
    Backend backendId = get<0>(get<7>(GetParam()));
    Target targetId = get<1>(get<7>(GetParam()));

#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_RELEASE < 2018030000
    if (backendId == DNN_BACKEND_INFERENCE_ENGINE && targetId == DNN_TARGET_MYRIAD)
        throw SkipTestException("Test is enabled starts from OpenVINO 2018R3");
#endif

    bool skipCheck = false;

    int sz[] = {outChannels, inChannels / group, kernel.height, kernel.width};
@@ -220,10 +220,6 @@ TEST(Layer_Test_Reshape, Accuracy)

TEST_P(Test_Caffe_layers, BatchNorm)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_RELEASE < 2018030000
    if (backend == DNN_BACKEND_INFERENCE_ENGINE)
        throw SkipTestException("Test is enabled starts from OpenVINO 2018R3");
#endif
    testLayerUsingCaffeModels("layer_batch_norm", true);
    testLayerUsingCaffeModels("layer_batch_norm_local_stats", true, false);
}
@@ -741,10 +737,6 @@ INSTANTIATE_TEST_CASE_P(Layer_Test, Crop, Combine(
// into the normalization area.
TEST_P(Test_Caffe_layers, Average_pooling_kernel_area)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_RELEASE < 2018030000
    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
        throw SkipTestException("Test is enabled starts from OpenVINO 2018R3");
#endif
    LayerParams lp;
    lp.name = "testAvePool";
    lp.type = "Pooling";
@@ -147,10 +147,6 @@ TEST_P(Test_TensorFlow_layers, eltwise)

TEST_P(Test_TensorFlow_layers, pad_and_concat)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_RELEASE < 2018030000
    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
        throw SkipTestException("Test is enabled starts from OpenVINO 2018R3");
#endif
    runTensorFlowNet("pad_and_concat");
}
@@ -185,10 +181,6 @@ TEST_P(Test_TensorFlow_layers, pooling)
// TODO: fix tests and replace to pooling
TEST_P(Test_TensorFlow_layers, ave_pool_same)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_RELEASE < 2018030000
    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
        throw SkipTestException("Test is enabled starts from OpenVINO 2018R3");
#endif
    runTensorFlowNet("ave_pool_same");
}
@@ -453,11 +445,6 @@ TEST_P(Test_TensorFlow_nets, opencv_face_detector_uint8)
TEST_P(Test_TensorFlow_nets, EAST_text_detection)
{
    checkBackend();
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_RELEASE < 2018030000
    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
        throw SkipTestException("Test is enabled starts from OpenVINO 2018R3");
#endif

    std::string netPath = findDataFile("dnn/frozen_east_text_detection.pb", false);
    std::string imgPath = findDataFile("cv/ximgproc/sources/08.png", false);
    std::string refScoresPath = findDataFile("dnn/east_text_detection.scores.npy", false);
@@ -516,17 +503,6 @@ TEST_P(Test_TensorFlow_layers, fp16_weights)
    runTensorFlowNet("fp16_max_pool_odd_valid", false, l1, lInf);
    runTensorFlowNet("fp16_max_pool_even", false, l1, lInf);
    runTensorFlowNet("fp16_padding_same", false, l1, lInf);
}

// TODO: fix pad_and_concat and add this test case to fp16_weights
TEST_P(Test_TensorFlow_layers, fp16_pad_and_concat)
{
    const float l1 = 0.00071;
    const float lInf = 0.012;
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_RELEASE < 2018030000
    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
        throw SkipTestException("Test is enabled starts from OpenVINO 2018R3");
#endif
    runTensorFlowNet("fp16_pad_and_concat", false, l1, lInf);
}
@@ -272,7 +272,7 @@ class Test_Torch_nets : public DNNTestLayer {};

TEST_P(Test_Torch_nets, OpenFace_accuracy)
{
#if defined(INF_ENGINE_RELEASE) && (INF_ENGINE_RELEASE < 2018030000 || INF_ENGINE_RELEASE == 2018050000)
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_RELEASE == 2018050000
    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
        throw SkipTestException("");
#endif