Flexible inputs for OpenVINO IR models

Mirror of https://github.com/opencv/opencv.git
commit 908bf935f7
parent e59e978fcd
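In short: this commit adds a reset() method to both Inference Engine backend network classes (InfEngineBackendNet for the NN Builder API and InfEngineNgraphNet for nGraph), clears their cached blobs and inference requests there, and calls it from Net::Impl before the backend is re-initialized, so a network loaded from OpenVINO IR can be forwarded again with a different input shape. A new flexible_inputs test covers the scenario; a user-level sketch of the enabled workflow follows the diff.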
@@ -1629,6 +1629,7 @@ struct Net::Impl
 
             Ptr<InfEngineBackendNode> ieNode = node.dynamicCast<InfEngineBackendNode>();
             CV_Assert(!ieNode.empty());
+            ieNode->net->reset();
 
             for (it = layers.begin(); it != layers.end(); ++it)
             {
@@ -1930,6 +1931,7 @@ struct Net::Impl
 
             Ptr<InfEngineNgraphNode> ieNode = node.dynamicCast<InfEngineNgraphNode>();
             CV_Assert(!ieNode.empty());
+            ieNode->net->reset();
 
             for (it = layers.begin(); it != layers.end(); ++it)
             {
@@ -780,6 +780,13 @@ void forwardNgraph(const std::vector<Ptr<BackendWrapper> >& outBlobsWrappers,
     ieNode->net->forward(outBlobsWrappers, isAsync);
 }
 
+void InfEngineNgraphNet::reset()
+{
+    allBlobs.clear();
+    infRequests.clear();
+    isInit = false;
+}
+
 void InfEngineNgraphNet::addBlobs(const std::vector<cv::Ptr<BackendWrapper> >& ptrs)
 {
     auto wrappers = ngraphWrappers(ptrs);
@@ -52,6 +52,8 @@ public:
 
     void createNet(Target targetId);
     void setNodePtr(std::shared_ptr<ngraph::Node>* ptr);
+
+    void reset();
 private:
     void release();
     int getNumComponents();
@@ -891,6 +891,13 @@ bool InfEngineBackendNet::isInitialized()
 #endif
 }
 
+void InfEngineBackendNet::reset()
+{
+    allBlobs.clear();
+    infRequests.clear();
+    isInit = false;
+}
+
 void InfEngineBackendNet::addBlobs(const std::vector<cv::Ptr<BackendWrapper> >& ptrs)
 {
     auto wrappers = infEngineWrappers(ptrs);
@@ -112,6 +112,8 @@ public:
 
     void addBlobs(const std::vector<cv::Ptr<BackendWrapper> >& ptrs);
 
+    void reset();
+
 private:
     InferenceEngine::Builder::Network netBuilder;
 
@@ -760,6 +760,48 @@ TEST_P(Test_Model_Optimizer, readFromBuffer)
     normAssert(ref, actual, "", 0, 0);
 }
 
+TEST_P(Test_Model_Optimizer, flexible_inputs)
+{
+    const Backend backendId = get<0>(GetParam());
+    const Target targetId = get<1>(GetParam());
+
+    const std::string& model = findDataFile("dnn/layers/layer_convolution_fp16.bin");
+    const std::string& proto = findDataFile("dnn/layers/layer_convolution_fp16.xml");
+
+    if (backendId == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
+        setInferenceEngineBackendType(CV_DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_API);
+    else if (backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
+        setInferenceEngineBackendType(CV_DNN_BACKEND_INFERENCE_ENGINE_NGRAPH);
+    else
+        FAIL() << "Unknown backendId";
+
+    Net net0 = readNet(model, proto);
+    net0.setPreferableTarget(targetId);
+
+    Net net1 = readNet(model, proto);
+    net1.setPreferableTarget(targetId);
+
+    // Generate inputs.
+    int blobSize0[] = {2, 6, 75, 113};
+    Mat input0(4, &blobSize0[0], CV_32F);
+    randu(input0, 0, 255);
+
+    net0.setInput(input0);
+    Mat ref = net0.forward().clone();
+
+    int blobSize1[] = {1, 6, 10, 9};
+    Mat input1(4, &blobSize1[0], CV_32F);
+    randu(input1, 0, 255);
+
+    net1.setInput(input1);
+    Mat out = net1.forward();
+    EXPECT_NE(out.size, ref.size);
+
+    net1.setInput(input0);
+    out = net1.forward();
+    normAssert(ref, out, 0, 0);
+}
+
 INSTANTIATE_TEST_CASE_P(/**/, Test_Model_Optimizer,
     dnnBackendsAndTargetsIE()
 );
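For context, here is a minimal user-level sketch of the workflow the flexible_inputs test exercises, written against the public cv::dnn API. The model path and blob sizes are hypothetical placeholders, not taken from the commit.

// Sketch only: reusing a single cv::dnn::Net loaded from OpenVINO IR with
// inputs of different spatial sizes. Paths and sizes below are illustrative.
#include <opencv2/core.hpp>
#include <opencv2/dnn.hpp>

int main()
{
    using namespace cv;
    using namespace cv::dnn;

    // Load an IR model (weights .bin + topology .xml), as in the test above.
    Net net = readNet("model.bin", "model.xml");
    net.setPreferableBackend(DNN_BACKEND_INFERENCE_ENGINE);
    net.setPreferableTarget(DNN_TARGET_CPU);

    // First run with one 4D NCHW blob...
    int sizeA[] = {1, 3, 224, 224};
    Mat inputA(4, sizeA, CV_32F, Scalar(0));
    net.setInput(inputA);
    Mat outA = net.forward();

    // ...then reuse the same Net with a different input shape; the backend
    // network is reset and re-initialized for the new dimensions
    // (see the reset() hunks above).
    int sizeB[] = {1, 3, 120, 160};
    Mat inputB(4, sizeB, CV_32F, Scalar(0));
    net.setInput(inputB);
    Mat outB = net.forward();
    return 0;
}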