dnn(ie): replace deprecated calls

commit f2057ce1ab
parent 6797fd65a5
@@ -654,7 +654,11 @@ void InfEngineNgraphNet::initPlugin(InferenceEngine::CNNNetwork& net)
             try
             {
                 InferenceEngine::IExtensionPtr extension =
+#if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2021_4)
+                    std::make_shared<InferenceEngine::Extension>(libName);
+#else
                     InferenceEngine::make_so_pointer<InferenceEngine::IExtension>(libName);
+#endif
 
                 ie.AddExtension(extension, "CPU");
                 CV_LOG_INFO(NULL, "DNN-IE: Loaded extension plugin: " << libName);
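For context, the hunk above only changes how the shared library is wrapped; the resulting IExtensionPtr is still passed to Core::AddExtension unchanged. Below is a minimal standalone sketch of that switch, not taken from the commit: the helper name loadCpuExtension and its Core/libName parameters are illustrative, and the INF_ENGINE_VER_MAJOR_GE / INF_ENGINE_RELEASE_2021_4 macros are assumed to come from OpenCV's op_inf_engine.hpp.

#include <memory>
#include <string>
#include <inference_engine.hpp>

// Hypothetical helper (not in OpenCV): register a custom-layer extension library
// with the CPU plugin using whichever API the Inference Engine release provides.
static void loadCpuExtension(InferenceEngine::Core& ie, const std::string& libName)
{
#if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2021_4)
    // 2021.4+: InferenceEngine::Extension is constructed from the library path directly,
    // so a plain std::make_shared replaces the deprecated factory helper.
    InferenceEngine::IExtensionPtr extension =
        std::make_shared<InferenceEngine::Extension>(libName);
#else
    // Older releases: the since-deprecated factory helper loads and wraps the library.
    InferenceEngine::IExtensionPtr extension =
        InferenceEngine::make_so_pointer<InferenceEngine::IExtension>(libName);
#endif
    ie.AddExtension(extension, "CPU");
}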
@@ -1002,35 +1006,54 @@ void InfEngineNgraphNet::forward(const std::vector<Ptr<BackendWrapper> >& outBlo
         reqWrapper->req.SetInput(inpBlobs);
         reqWrapper->req.SetOutput(outBlobs);
 
+#if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2021_4)
+        InferenceEngine::InferRequest infRequest = reqWrapper->req;
+        NgraphReqWrapper* wrapperPtr = reqWrapper.get();
+        CV_Assert(wrapperPtr && "Internal error");
+#else
         InferenceEngine::IInferRequest::Ptr infRequestPtr = reqWrapper->req;
-        infRequestPtr->SetUserData(reqWrapper.get(), 0);
+        CV_Assert(infRequestPtr);
+        InferenceEngine::IInferRequest& infRequest = *infRequestPtr.get();
+        infRequest.SetUserData(reqWrapper.get(), 0);
+#endif
 
-        infRequestPtr->SetCompletionCallback(
-            [](InferenceEngine::IInferRequest::Ptr request, InferenceEngine::StatusCode status)
+#if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2021_4)
+        // do NOT capture 'reqWrapper' (smart ptr) in the lambda callback
+        infRequest.SetCompletionCallback<std::function<void(InferenceEngine::InferRequest, InferenceEngine::StatusCode)>>(
+            [wrapperPtr](InferenceEngine::InferRequest /*request*/, InferenceEngine::StatusCode status)
+#else
+        infRequest.SetCompletionCallback(
+            [](InferenceEngine::IInferRequest::Ptr requestPtr, InferenceEngine::StatusCode status)
+#endif
             {
                 CV_LOG_DEBUG(NULL, "DNN(nGraph): completionCallback(" << (int)status << ")");
+#if !INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2021_4)
+                CV_Assert(requestPtr);
+                InferenceEngine::IInferRequest& request = *requestPtr.get();
 
-                NgraphReqWrapper* wrapper;
-                request->GetUserData((void**)&wrapper, 0);
-                CV_Assert(wrapper && "Internal error");
+                NgraphReqWrapper* wrapperPtr;
+                request.GetUserData((void**)&wrapperPtr, 0);
+                CV_Assert(wrapperPtr && "Internal error");
+#endif
+                NgraphReqWrapper& wrapper = *wrapperPtr;
 
                 size_t processedOutputs = 0;
                 try
                 {
-                    for (; processedOutputs < wrapper->outProms.size(); ++processedOutputs)
+                    for (; processedOutputs < wrapper.outProms.size(); ++processedOutputs)
                     {
-                        const std::string& name = wrapper->outsNames[processedOutputs];
-                        Mat m = ngraphBlobToMat(wrapper->req.GetBlob(name));
+                        const std::string& name = wrapper.outsNames[processedOutputs];
+                        Mat m = ngraphBlobToMat(wrapper.req.GetBlob(name));
 
                         try
                         {
                             CV_Assert(status == InferenceEngine::StatusCode::OK);
-                            wrapper->outProms[processedOutputs].setValue(m.clone());
+                            wrapper.outProms[processedOutputs].setValue(m.clone());
                         }
                         catch (...)
                         {
                             try {
-                                wrapper->outProms[processedOutputs].setException(std::current_exception());
+                                wrapper.outProms[processedOutputs].setException(std::current_exception());
                             } catch(...) {
                                 CV_LOG_ERROR(NULL, "DNN: Exception occurred during async inference exception propagation");
                             }
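The #else branches above keep the pre-2021.4 behaviour, where the completion callback is a plain, capture-free function: the wrapper pointer therefore has to make a round trip through the request as untyped user data. A minimal sketch of that round trip, not from the commit, with a hypothetical Context struct standing in for NgraphReqWrapper:

#include <inference_engine.hpp>

struct Context { bool isReady = false; };  // hypothetical stand-in for NgraphReqWrapper

static void registerLegacyCallback(InferenceEngine::IInferRequest::Ptr req, Context* ctx)
{
    req->SetUserData(ctx, 0);  // stash the raw pointer on the request (second arg: ResponseDesc*)
    req->SetCompletionCallback(
        // A captureless lambda converts to the C-style IInferRequest callback type.
        [](InferenceEngine::IInferRequest::Ptr request, InferenceEngine::StatusCode status)
        {
            Context* ctx = nullptr;
            request->GetUserData((void**)&ctx, 0);  // recover the pointer inside the callback
            if (ctx)
                ctx->isReady = (status == InferenceEngine::StatusCode::OK);
        });
}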
@@ -1040,16 +1063,16 @@ void InfEngineNgraphNet::forward(const std::vector<Ptr<BackendWrapper> >& outBlo
                 catch (...)
                 {
                     std::exception_ptr e = std::current_exception();
-                    for (; processedOutputs < wrapper->outProms.size(); ++processedOutputs)
+                    for (; processedOutputs < wrapper.outProms.size(); ++processedOutputs)
                     {
                         try {
-                            wrapper->outProms[processedOutputs].setException(e);
+                            wrapper.outProms[processedOutputs].setException(e);
                         } catch(...) {
                             CV_LOG_ERROR(NULL, "DNN: Exception occurred during async inference exception propagation");
                         }
                     }
                 }
-                wrapper->isReady = true;
+                wrapper.isReady = true;
             }
         );
     }
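On the 2021.4+ side the typed SetCompletionCallback<std::function<...>> overload lets the lambda carry the wrapper pointer itself, so the user-data round trip disappears; per the comment added in the diff, only the raw pointer is captured, not the owning smart pointer. A minimal usage sketch under the same assumptions (hypothetical Context in place of NgraphReqWrapper, an already configured InferRequest):

#include <functional>
#include <inference_engine.hpp>

struct Context { bool isReady = false; };  // hypothetical stand-in for NgraphReqWrapper

static void startAsync(InferenceEngine::InferRequest& req, Context* ctx)
{
    req.SetCompletionCallback<std::function<void(InferenceEngine::InferRequest,
                                                 InferenceEngine::StatusCode)>>(
        // Capture the raw pointer only; capturing an owning shared_ptr from inside
        // its own request's callback is exactly what the commit comment warns against.
        [ctx](InferenceEngine::InferRequest /*request*/, InferenceEngine::StatusCode status)
        {
            ctx->isReady = (status == InferenceEngine::StatusCode::OK);
        });
    req.StartAsync();  // the callback fires on a worker thread once inference completes
}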