Mirror of https://github.com/opencv/opencv.git
Enable some tests for clDNN plugin from Intel's Inference Engine
parent 7ea5029ae5
commit bd77d100e1
@@ -15,6 +15,10 @@ macro(ie_fail)
   return()
 endmacro()
 
+if(NOT HAVE_CXX11)
+  ie_fail()
+endif()
+
 if(NOT INF_ENGINE_ROOT_DIR OR NOT EXISTS "${INF_ENGINE_ROOT_DIR}/include/inference_engine.hpp")
   set(ie_root_paths "${INF_ENGINE_ROOT_DIR}")
   if(DEFINED ENV{INTEL_CVSDK_DIR})
@@ -95,24 +95,18 @@ PERF_TEST_P_(DNNTestNetwork, AlexNet)
 
 PERF_TEST_P_(DNNTestNetwork, GoogLeNet)
 {
-    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL_FP16)
-        throw SkipTestException("");
     processNet("dnn/bvlc_googlenet.caffemodel", "dnn/bvlc_googlenet.prototxt",
                "", Mat(cv::Size(224, 224), CV_32FC3));
 }
 
 PERF_TEST_P_(DNNTestNetwork, ResNet_50)
 {
-    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL_FP16)
-        throw SkipTestException("");
     processNet("dnn/ResNet-50-model.caffemodel", "dnn/ResNet-50-deploy.prototxt",
                "resnet_50.yml", Mat(cv::Size(224, 224), CV_32FC3));
 }
 
 PERF_TEST_P_(DNNTestNetwork, SqueezeNet_v1_1)
 {
-    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL_FP16)
-        throw SkipTestException("");
     processNet("dnn/squeezenet_v1.1.caffemodel", "dnn/squeezenet_v1.1.prototxt",
                "squeezenet_v1_1.yml", Mat(cv::Size(227, 227), CV_32FC3));
 }
@@ -1255,6 +1255,15 @@ struct Net::Impl
                 if (weightableLayer->_biases)
                     weightableLayer->_biases = convertFp16(weightableLayer->_biases);
             }
+            else
+            {
+                for (const auto& weights : {"weights", "biases"})
+                {
+                    auto it = ieNode->layer->blobs.find(weights);
+                    if (it != ieNode->layer->blobs.end())
+                        it->second = convertFp16(it->second);
+                }
+            }
         }
 
         ieNode->connect(ld.inputBlobsWrappers, ld.outputBlobsWrappers);
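Note: the FP16 branch above narrows 32-bit float weights and biases to half precision before they are handed to the clDNN plugin. As a minimal standalone sketch of that narrowing (this uses cv::convertFp16 on an ordinary Mat purely for illustration, not the Inference Engine blob path from the diff):

    #include <opencv2/core.hpp>
    #include <iostream>

    int main()
    {
        // A toy "weights" matrix in 32-bit float, standing in for a layer blob.
        cv::Mat weights32 = (cv::Mat_<float>(1, 4) << 0.1f, -2.5f, 3.14159f, 1e-4f);

        // Narrow to FP16 (stored as CV_16S) and widen back to inspect the rounding error.
        cv::Mat weights16, roundTrip;
        cv::convertFp16(weights32, weights16);   // CV_32F -> FP16
        cv::convertFp16(weights16, roundTrip);   // FP16   -> CV_32F

        std::cout << "original:   " << weights32 << "\n"
                  << "round trip: " << roundTrip << std::endl;
        return 0;
    }

The looser test tolerances for DNN_TARGET_OPENCL_FP16 later in this commit account for exactly this kind of rounding.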
@@ -295,6 +295,19 @@ public:
         return false;
     }
 
+    void finalize(const std::vector<Mat*> &inputs, std::vector<Mat> &outputs) CV_OVERRIDE
+    {
+        CV_Assert(inputs.size() > 1, inputs[0]->dims == 4, inputs[1]->dims == 4);
+        int layerWidth = inputs[0]->size[3];
+        int layerHeight = inputs[0]->size[2];
+
+        int imageWidth = inputs[1]->size[3];
+        int imageHeight = inputs[1]->size[2];
+
+        _stepY = _stepY == 0 ? (static_cast<float>(imageHeight) / layerHeight) : _stepY;
+        _stepX = _stepX == 0 ? (static_cast<float>(imageWidth) / layerWidth) : _stepX;
+    }
+
 #ifdef HAVE_OPENCL
     bool forward_ocl(InputArrayOfArrays inps, OutputArrayOfArrays outs, OutputArrayOfArrays internals)
     {
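Note: when step is not given in the layer parameters, the new finalize() defaults it to the ratio of image size to feature-map size, so the prior-box grid covers the whole image. A tiny standalone sketch of that rule, with illustrative SSD-style shapes (a 19x19 map on a 300x300 input) that are not taken from the diff:

    #include <cstdio>

    int main()
    {
        // Illustrative shapes: inputs[0] would be the 19x19 feature map, inputs[1] the 300x300 image.
        int layerWidth = 19, layerHeight = 19;
        int imageWidth = 300, imageHeight = 300;

        float stepX = 0.f, stepY = 0.f;  // 0 means "not set in the layer parameters"

        // Same defaulting rule as in finalize(): fall back to the image/layer ratio.
        stepY = (stepY == 0.f) ? static_cast<float>(imageHeight) / layerHeight : stepY;
        stepX = (stepX == 0.f) ? static_cast<float>(imageWidth) / layerWidth : stepX;

        std::printf("stepX = %.4f, stepY = %.4f\n", stepX, stepY);  // ~15.7895 for both
        return 0;
    }

Because finalize() now fills _stepX/_stepY once, the per-forward recomputation removed in the next two hunks is no longer needed.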
@@ -310,16 +323,6 @@ public:
         int _imageWidth = inputs[1].size[3];
         int _imageHeight = inputs[1].size[2];
 
-        float stepX, stepY;
-        if (_stepX == 0 || _stepY == 0)
-        {
-            stepX = static_cast<float>(_imageWidth) / _layerWidth;
-            stepY = static_cast<float>(_imageHeight) / _layerHeight;
-        } else {
-            stepX = _stepX;
-            stepY = _stepY;
-        }
-
         if (umat_offsetsX.empty())
         {
             Mat offsetsX(1, _offsetsX.size(), CV_32FC1, &_offsetsX[0]);
@@ -339,8 +342,8 @@ public:
 
         ocl::Kernel kernel("prior_box", ocl::dnn::prior_box_oclsrc);
         kernel.set(0, (int)nthreads);
-        kernel.set(1, (float)stepX);
-        kernel.set(2, (float)stepY);
+        kernel.set(1, (float)_stepX);
+        kernel.set(2, (float)_stepY);
         kernel.set(3, ocl::KernelArg::PtrReadOnly(umat_offsetsX));
         kernel.set(4, ocl::KernelArg::PtrReadOnly(umat_offsetsY));
         kernel.set(5, (int)_offsetsX.size());
@@ -410,15 +413,6 @@ public:
         int _imageWidth = inputs[1]->size[3];
         int _imageHeight = inputs[1]->size[2];
 
-        float stepX, stepY;
-        if (_stepX == 0 || _stepY == 0) {
-            stepX = static_cast<float>(_imageWidth) / _layerWidth;
-            stepY = static_cast<float>(_imageHeight) / _layerHeight;
-        } else {
-            stepX = _stepX;
-            stepY = _stepY;
-        }
-
         float* outputPtr = outputs[0].ptr<float>();
         float _boxWidth, _boxHeight;
         for (size_t h = 0; h < _layerHeight; ++h)
@@ -431,8 +425,8 @@ public:
                 _boxHeight = _boxHeights[i];
                 for (int j = 0; j < _offsetsX.size(); ++j)
                 {
-                    float center_x = (w + _offsetsX[j]) * stepX;
-                    float center_y = (h + _offsetsY[j]) * stepY;
+                    float center_x = (w + _offsetsX[j]) * _stepX;
+                    float center_y = (h + _offsetsY[j]) * _stepY;
                     outputPtr = addPrior(center_x, center_y, _boxWidth, _boxHeight, _imageWidth,
                                          _imageHeight, _bboxesNormalized, outputPtr);
                 }
@@ -495,7 +489,7 @@ public:
             ieLayer->params["aspect_ratio"] += format(",%f", _aspectRatios[i]);
         }
 
-        ieLayer->params["flip"] = _flip ? "1" : "0";
+        ieLayer->params["flip"] = "0";  // We already flipped aspect ratios.
         ieLayer->params["clip"] = _clip ? "1" : "0";
 
         CV_Assert(!_variance.empty());
@@ -503,12 +497,20 @@ public:
         for (int i = 1; i < _variance.size(); ++i)
             ieLayer->params["variance"] += format(",%f", _variance[i]);
 
-        ieLayer->params["step"] = _stepX == _stepY ? format("%f", _stepX) : "0";
-        ieLayer->params["step_h"] = _stepY;
-        ieLayer->params["step_w"] = _stepX;
-
+        if (_stepX == _stepY)
+        {
+            ieLayer->params["step"] = format("%f", _stepX);
+            ieLayer->params["step_h"] = "0.0";
+            ieLayer->params["step_w"] = "0.0";
+        }
+        else
+        {
+            ieLayer->params["step"] = "0.0";
+            ieLayer->params["step_h"] = format("%f", _stepY);
+            ieLayer->params["step_w"] = format("%f", _stepX);
+        }
         CV_Assert(_offsetsX.size() == 1, _offsetsY.size() == 1, _offsetsX[0] == _offsetsY[0]);
-        ieLayer->params["offset"] = format("%f", _offsetsX[0]);;
+        ieLayer->params["offset"] = format("%f", _offsetsX[0]);
 
         return Ptr<BackendNode>(new InfEngineBackendNode(ieLayer));
 #endif  // HAVE_INF_ENGINE
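Note: the intent of the new branch, sketched outside the Inference Engine types (the std::map below stands in for ieLayer->params and the helper is hypothetical, not part of the diff): a square step goes into the single step parameter, a rectangular one into step_h/step_w, and the unused fields are zeroed explicitly.

    #include <cstdio>
    #include <map>
    #include <string>

    // Hypothetical helper mirroring the branch in the diff
    // (std::to_string produces %f-style output for floats).
    std::map<std::string, std::string> stepParams(float stepX, float stepY)
    {
        std::map<std::string, std::string> params;
        if (stepX == stepY)
        {
            params["step"]   = std::to_string(stepX);
            params["step_h"] = "0.0";
            params["step_w"] = "0.0";
        }
        else
        {
            params["step"]   = "0.0";
            params["step_h"] = std::to_string(stepY);
            params["step_w"] = std::to_string(stepX);
        }
        return params;
    }

    int main()
    {
        for (const auto& kv : stepParams(16.f, 16.f))
            std::printf("%s = %s\n", kv.first.c_str(), kv.second.c_str());
        return 0;
    }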
@@ -233,9 +233,17 @@ InferenceEngine::StatusCode
 InfEngineBackendNet::getLayerByName(const char *layerName, InferenceEngine::CNNLayerPtr &out,
                                     InferenceEngine::ResponseDesc *resp) noexcept
 {
-    CV_Error(Error::StsNotImplemented, "");
+    for (auto& l : layers)
+    {
+        if (l->name == layerName)
+        {
+            out = l;
+            return InferenceEngine::StatusCode::OK;
+        }
+    }
+    CV_Error(Error::StsObjectNotFound, cv::format("Cannot find a layer %s", layerName));
     return InferenceEngine::StatusCode::NOT_FOUND;
 }
 
 void InfEngineBackendNet::setTargetDevice(InferenceEngine::TargetDevice device) noexcept
 {
@@ -23,9 +23,9 @@ public:
     }
 
     void processNet(const std::string& weights, const std::string& proto,
-                    Size inpSize, const std::string& outputLayer,
+                    Size inpSize, const std::string& outputLayer = "",
                     const std::string& halideScheduler = "",
-                    double l1 = 1e-5, double lInf = 1e-4)
+                    double l1 = 0.0, double lInf = 0.0)
     {
         // Create a common input blob.
         int blobSize[] = {1, 3, inpSize.height, inpSize.width};
@@ -36,9 +36,9 @@ public:
     }
 
     void processNet(std::string weights, std::string proto,
-                    Mat inp, const std::string& outputLayer,
+                    Mat inp, const std::string& outputLayer = "",
                     std::string halideScheduler = "",
-                    double l1 = 1e-5, double lInf = 1e-4)
+                    double l1 = 0.0, double lInf = 0.0)
    {
         if (backend == DNN_BACKEND_DEFAULT && target == DNN_TARGET_OPENCL)
         {
@@ -49,6 +49,16 @@ public:
                 throw SkipTestException("OpenCL is not available/disabled in OpenCV");
             }
         }
+        if (target == DNN_TARGET_OPENCL_FP16)
+        {
+            l1 = l1 == 0.0 ? 4e-3 : l1;
+            lInf = lInf == 0.0 ? 2e-2 : lInf;
+        }
+        else
+        {
+            l1 = l1 == 0.0 ? 1e-5 : l1;
+            lInf = lInf == 0.0 ? 1e-4 : lInf;
+        }
         weights = findDataFile(weights, false);
         if (!proto.empty())
             proto = findDataFile(proto, false);
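Note: with this block, callers that leave l1/lInf at 0.0 get target-dependent defaults (looser for DNN_TARGET_OPENCL_FP16), while explicit non-zero values still win. A hypothetical standalone helper that captures the same rule, only for illustration:

    #include <cstdio>

    struct Tolerances { double l1, lInf; };

    // Hypothetical helper: 0.0 means "use the default for this target".
    Tolerances resolveTolerances(bool fp16Target, double l1 = 0.0, double lInf = 0.0)
    {
        if (fp16Target)
        {
            l1   = (l1   == 0.0) ? 4e-3 : l1;
            lInf = (lInf == 0.0) ? 2e-2 : lInf;
        }
        else
        {
            l1   = (l1   == 0.0) ? 1e-5 : l1;
            lInf = (lInf == 0.0) ? 1e-4 : lInf;
        }
        return {l1, lInf};
    }

    int main()
    {
        Tolerances fp32 = resolveTolerances(false);        // defaults: 1e-5 / 1e-4
        Tolerances fp16 = resolveTolerances(true);         // defaults: 4e-3 / 2e-2
        Tolerances user = resolveTolerances(true, 3e-5);   // explicit l1 is kept
        std::printf("fp32: %g %g\nfp16: %g %g\nuser: %g %g\n",
                    fp32.l1, fp32.lInf, fp16.l1, fp16.lInf, user.l1, user.lInf);
        return 0;
    }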
@@ -71,31 +81,28 @@ public:
         Mat out = net.forward(outputLayer).clone();
 
         if (outputLayer == "detection_out")
-            normAssertDetections(outDefault, out, "First run", 0.2);
+            normAssertDetections(outDefault, out, "First run", 0.2, l1, lInf);
         else
             normAssert(outDefault, out, "First run", l1, lInf);
 
         // Test 2: change input.
-        inp *= 0.1f;
+        float* inpData = (float*)inp.data;
+        for (int i = 0; i < inp.size[0] * inp.size[1]; ++i)
+        {
+            Mat slice(inp.size[2], inp.size[3], CV_32F, inpData);
+            cv::flip(slice, slice, 1);
+            inpData += slice.total();
+        }
         netDefault.setInput(inp);
         net.setInput(inp);
         outDefault = netDefault.forward(outputLayer).clone();
         out = net.forward(outputLayer).clone();
 
         if (outputLayer == "detection_out")
-            checkDetections(outDefault, out, "Second run", l1, lInf);
+            normAssertDetections(outDefault, out, "Second run", 0.2, l1, lInf);
         else
             normAssert(outDefault, out, "Second run", l1, lInf);
     }
-
-    void checkDetections(const Mat& out, const Mat& ref, const std::string& msg,
-                         float l1, float lInf, int top = 5)
-    {
-        top = std::min(std::min(top, out.size[2]), out.size[3]);
-        std::vector<cv::Range> range(4, cv::Range::all());
-        range[2] = cv::Range(0, top);
-        normAssert(out(range), ref(range));
-    }
 };
 
 TEST_P(DNNTestNetwork, AlexNet)
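Note: the second run now flips every HxW slice of the NCHW input blob instead of scaling it, walking the data channel by channel. A minimal standalone sketch of the same idea on a small random blob (the 1x3x4x4 shape is illustrative, not from the diff):

    #include <opencv2/core.hpp>
    #include <iostream>

    int main()
    {
        // Illustrative NCHW blob: 1 image, 3 channels, 4x4 spatial size.
        int blobSize[] = {1, 3, 4, 4};
        cv::Mat inp(4, blobSize, CV_32F);
        cv::randu(inp, 0.0f, 1.0f);

        // Flip each HxW slice horizontally, advancing the pointer one channel at a time,
        // just like the loop added to processNet().
        float* inpData = (float*)inp.data;
        for (int i = 0; i < inp.size[0] * inp.size[1]; ++i)
        {
            cv::Mat slice(inp.size[2], inp.size[3], CV_32F, inpData);
            cv::flip(slice, slice, 1);  // 1 = flip around the vertical axis
            inpData += slice.total();
        }
        std::cout << "flipped " << inp.size[0] * inp.size[1] << " slices" << std::endl;
        return 0;
    }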
@@ -110,8 +117,6 @@ TEST_P(DNNTestNetwork, AlexNet)
 
 TEST_P(DNNTestNetwork, ResNet_50)
 {
-    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL_FP16)
-        throw SkipTestException("");
     processNet("dnn/ResNet-50-model.caffemodel", "dnn/ResNet-50-deploy.prototxt",
                Size(224, 224), "prob",
                target == DNN_TARGET_OPENCL ? "dnn/halide_scheduler_opencl_resnet_50.yml" :
@@ -120,8 +125,6 @@ TEST_P(DNNTestNetwork, ResNet_50)
 
 TEST_P(DNNTestNetwork, SqueezeNet_v1_1)
 {
-    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL_FP16)
-        throw SkipTestException("");
     processNet("dnn/squeezenet_v1.1.caffemodel", "dnn/squeezenet_v1.1.prototxt",
                Size(227, 227), "prob",
                target == DNN_TARGET_OPENCL ? "dnn/halide_scheduler_opencl_squeezenet_v1_1.yml" :
@@ -130,8 +133,6 @@ TEST_P(DNNTestNetwork, SqueezeNet_v1_1)
 
 TEST_P(DNNTestNetwork, GoogLeNet)
 {
-    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL_FP16)
-        throw SkipTestException("");
     processNet("dnn/bvlc_googlenet.caffemodel", "dnn/bvlc_googlenet.prototxt",
                Size(224, 224), "prob");
 }
@@ -180,7 +181,7 @@ TEST_P(DNNTestNetwork, SSD_VGG16)
 {
     if (backend == DNN_BACKEND_DEFAULT && target == DNN_TARGET_OPENCL ||
         backend == DNN_BACKEND_HALIDE && target == DNN_TARGET_CPU ||
-        backend == DNN_BACKEND_INFERENCE_ENGINE)
+        backend == DNN_BACKEND_INFERENCE_ENGINE && target != DNN_TARGET_CPU)
         throw SkipTestException("");
     processNet("dnn/VGG_ILSVRC2016_SSD_300x300_iter_440000.caffemodel",
                "dnn/ssd_vgg16.prototxt", Size(300, 300), "detection_out");
@@ -189,30 +190,24 @@ TEST_P(DNNTestNetwork, SSD_VGG16)
 TEST_P(DNNTestNetwork, OpenPose_pose_coco)
 {
     if (backend == DNN_BACKEND_HALIDE) throw SkipTestException("");
-    double l1 = target == DNN_TARGET_OPENCL_FP16 ? 3e-5 : 1e-5;
-    double lInf = target == DNN_TARGET_OPENCL_FP16 ? 3e-3 : 1e-4;
     processNet("dnn/openpose_pose_coco.caffemodel", "dnn/openpose_pose_coco.prototxt",
-               Size(368, 368), "", "", l1, lInf);
+               Size(368, 368));
 }
 
 TEST_P(DNNTestNetwork, OpenPose_pose_mpi)
 {
     if (backend == DNN_BACKEND_HALIDE) throw SkipTestException("");
-    double l1 = target == DNN_TARGET_OPENCL_FP16 ? 4e-5 : 1e-5;
-    double lInf = target == DNN_TARGET_OPENCL_FP16 ? 7e-3 : 1e-4;
     processNet("dnn/openpose_pose_mpi.caffemodel", "dnn/openpose_pose_mpi.prototxt",
-               Size(368, 368), "", "", l1, lInf);
+               Size(368, 368));
 }
 
 TEST_P(DNNTestNetwork, OpenPose_pose_mpi_faster_4_stages)
 {
     if (backend == DNN_BACKEND_HALIDE) throw SkipTestException("");
-    double l1 = target == DNN_TARGET_OPENCL_FP16 ? 5e-5 : 1e-5;
-    double lInf = target == DNN_TARGET_OPENCL_FP16 ? 5e-3 : 1e-4;
     // The same .caffemodel but modified .prototxt
     // See https://github.com/CMU-Perceptual-Computing-Lab/openpose/blob/master/src/openpose/pose/poseParameters.cpp
     processNet("dnn/openpose_pose_mpi.caffemodel", "dnn/openpose_pose_mpi_faster_4_stages.prototxt",
-               Size(368, 368), "", "", l1, lInf);
+               Size(368, 368));
 }
 
 TEST_P(DNNTestNetwork, OpenFace)