Merge pull request #20528 from TolyaTalamanov:at/fix-overwrite-blob-precision-bug

[G-API] Prohibit setPrecision & preprocessing for imported networks

* Prohibit preprocessing for imported networks

* Fix typo: input_names -> output_names

* Move setBlob logic to separate function

* Change comment
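
For context, the split this change enforces can be illustrated with a minimal standalone sketch against the classic InferenceEngine API. This is not part of the commit; core, input_name, blob and the device strings are placeholder assumptions. When a network is loaded from IR, precision and resize preprocessing are configured on its InputInfo before compilation; a network restored via ImportNetwork is already compiled, so there is no InputInfo to reconfigure and resize preprocessing can only be attached per request through the three-argument SetBlob overload.

#include <inference_engine.hpp>
#include <string>

namespace IE = InferenceEngine;

// Load path: the CNNNetwork is still available, so precision and resize
// preprocessing are configured on InputInfo before the network is compiled.
void load_path(IE::Core &core, const std::string &xml, const std::string &bin,
               const std::string &input_name, IE::Blob::Ptr blob) {
    IE::CNNNetwork net = core.ReadNetwork(xml, bin);
    auto ii = net.getInputsInfo().at(input_name);
    ii->setPrecision(IE::Precision::U8);
    ii->getPreProcess().setResizeAlgorithm(IE::RESIZE_BILINEAR);
    auto req = core.LoadNetwork(net, "CPU").CreateInferRequest();
    req.SetBlob(input_name, blob);        // plain SetBlob is sufficient here
    req.Infer();
}

// Import path: the network is already compiled, so resize preprocessing is
// passed alongside the blob on every request instead.
void import_path(IE::Core &core, const std::string &blob_path,
                 const std::string &input_name, IE::Blob::Ptr blob) {
    auto req = core.ImportNetwork(blob_path, "MYRIAD").CreateInferRequest();
    IE::PreProcessInfo info;
    info.setResizeAlgorithm(IE::RESIZE_BILINEAR);
    req.SetBlob(input_name, blob, info);  // preprocessing travels with the blob
    req.Infer();
}

The new setBlob() helper in the diff below applies the same dispatch, keyed on ParamDesc::Kind.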
Anatoliy Talamanov 2021-08-18 10:42:32 +03:00 committed by GitHub
parent 46fb88c76f
commit 95919051e0
3 changed files with 120 additions and 93 deletions

@@ -243,10 +243,6 @@ struct IEUnit {
this_plugin = cv::gimpl::ie::wrap::getPlugin(params);
this_plugin.SetConfig(params.config);
this_network = cv::gimpl::ie::wrap::importNetwork(this_plugin, params, rctx);
// FIXME: ICNNetwork returns InputsDataMap/OutputsDataMap,
// but ExecutableNetwork returns ConstInputsDataMap/ConstOutputsDataMap
inputs = cv::gimpl::ie::wrap::toInputsDataMap(this_network.GetInputsInfo());
outputs = cv::gimpl::ie::wrap::toOutputsDataMap(this_network.GetOutputsInfo());
if (!params.reshape_table.empty() || !params.layer_names_to_reshape.empty()) {
GAPI_LOG_WARNING(NULL, "Reshape isn't supported for imported network");
}
@@ -270,10 +266,18 @@ struct IEUnit {
+ params.model_path));
}
if (params.num_in == 1u && params.input_names.empty()) {
params.input_names = { inputs.begin()->first };
if (params.kind == cv::gapi::ie::detail::ParamDesc::Kind::Load) {
params.input_names = { inputs.begin()->first };
} else {
params.input_names = { this_network.GetInputsInfo().begin()->first };
}
}
if (params.num_out == 1u && params.output_names.empty()) {
params.output_names = { outputs.begin()->first };
if (params.kind == cv::gapi::ie::detail::ParamDesc::Kind::Load) {
params.output_names = { outputs.begin()->first };
} else {
params.output_names = { this_network.GetOutputsInfo().begin()->first };
}
}
if (!params.reshape_table.empty()) {
GAPI_Assert((params.reshape_table.size() + params.layer_names_to_reshape.size()) <=
@@ -533,6 +537,24 @@ inline IE::Blob::Ptr extractBlob(IECallContext& ctx, std::size_t i) {
}
GAPI_Assert(false);
}
static void setBlob(InferenceEngine::InferRequest& req,
cv::gapi::ie::detail::ParamDesc::Kind kind,
const std::string& layer_name,
IE::Blob::Ptr blob) {
// NB: For networks created via importNetwork, preprocessing
// must be passed as a SetBlob argument.
if (kind == cv::gapi::ie::detail::ParamDesc::Kind::Load) {
req.SetBlob(layer_name, blob);
} else {
GAPI_Assert(kind == cv::gapi::ie::detail::ParamDesc::Kind::Import);
IE::PreProcessInfo info;
info.setResizeAlgorithm(IE::RESIZE_BILINEAR);
req.SetBlob(layer_name, blob, info);
}
}
} // anonymous namespace
std::vector<InferenceEngine::InferRequest> cv::gimpl::ie::IECompiled::createInferRequests() {
@@ -891,25 +913,30 @@ struct Infer: public cv::detail::KernelTag {
// meta order.
GAPI_Assert(uu.params.input_names.size() == in_metas.size()
&& "Known input layers count doesn't match input meta count");
for (auto &&it : ade::util::zip(ade::util::toRange(uu.params.input_names),
ade::util::toRange(in_metas))) {
const auto &input_name = std::get<0>(it);
auto &&ii = uu.inputs.at(input_name);
const auto & mm = std::get<1>(it);
configureInputInfo(ii, mm);
if (uu.params.layer_names_to_reshape.find(input_name) !=
uu.params.layer_names_to_reshape.end()) {
configureInputReshapeByImage(ii, mm, input_reshape_table);
// NB: Configuring input precision and network reshape must be done
// only in the loadNetwork case.
if (uu.params.kind == cv::gapi::ie::detail::ParamDesc::Kind::Load) {
for (auto &&it : ade::util::zip(ade::util::toRange(uu.params.input_names),
ade::util::toRange(in_metas))) {
const auto &input_name = std::get<0>(it);
auto &&ii = uu.inputs.at(input_name);
const auto & mm = std::get<1>(it);
configureInputInfo(ii, mm);
if (uu.params.layer_names_to_reshape.find(input_name) !=
uu.params.layer_names_to_reshape.end()) {
configureInputReshapeByImage(ii, mm, input_reshape_table);
}
ii->getPreProcess().setResizeAlgorithm(IE::RESIZE_BILINEAR);
}
ii->getPreProcess().setResizeAlgorithm(IE::RESIZE_BILINEAR);
}
// FIXME: This isn't the best place to call reshape function.
// Correct solution would be to do this in compile() method of network,
// but now input meta isn't passed to compile() method.
if (!input_reshape_table.empty()) {
const_cast<IE::CNNNetwork *>(&uu.net)->reshape(input_reshape_table);
// FIXME: This isn't the best place to call reshape function.
// Correct solution would be to do this in compile() method of network,
// but now input meta isn't passed to compile() method.
if (!input_reshape_table.empty()) {
const_cast<IE::CNNNetwork *>(&uu.net)->reshape(input_reshape_table);
}
}
// FIXME: It would be nice here to have an exact number of network's
@@ -941,7 +968,10 @@ struct Infer: public cv::detail::KernelTag {
// and redirect our data producers to this memory
// (A memory dialog comes to the picture again)
IE::Blob::Ptr this_blob = extractBlob(*ctx, i);
req.SetBlob(ctx->uu.params.input_names[i], this_blob);
setBlob(req,
ctx->uu.params.kind,
ctx->uu.params.input_names[i],
this_blob);
}
// FIXME: Should it be done by kernel ?
// What about to do that in RequestPool ?
@@ -973,22 +1003,26 @@ struct InferROI: public cv::detail::KernelTag {
GAPI_Assert(1u == uu.params.input_names.size());
GAPI_Assert(2u == in_metas.size());
// 0th is ROI, 1st is input image
const auto &input_name = uu.params.input_names.at(0);
auto &&ii = uu.inputs.at(input_name);
auto &&mm = in_metas.at(1u);
configureInputInfo(ii, mm);
if (uu.params.layer_names_to_reshape.find(input_name) !=
uu.params.layer_names_to_reshape.end()) {
configureInputReshapeByImage(ii, mm, input_reshape_table);
}
ii->getPreProcess().setResizeAlgorithm(IE::RESIZE_BILINEAR);
// NB: Configuring input precision and network reshape must be done
// only in the loadNetwork case.
if (uu.params.kind == cv::gapi::ie::detail::ParamDesc::Kind::Load) {
// 0th is ROI, 1st is input image
const auto &input_name = uu.params.input_names.at(0);
auto &&ii = uu.inputs.at(input_name);
auto &&mm = in_metas.at(1u);
configureInputInfo(ii, mm);
if (uu.params.layer_names_to_reshape.find(input_name) !=
uu.params.layer_names_to_reshape.end()) {
configureInputReshapeByImage(ii, mm, input_reshape_table);
}
ii->getPreProcess().setResizeAlgorithm(IE::RESIZE_BILINEAR);
// FIXME: This isn't the best place to call reshape function.
// Correct solution would be to do this in compile() method of network,
// but now input meta isn't passed to compile() method.
if (!input_reshape_table.empty()) {
const_cast<IE::CNNNetwork *>(&uu.net)->reshape(input_reshape_table);
// FIXME: This isn't the best place to call reshape function.
// Correct solution would be to do this in compile() method of network,
// but now input meta isn't passed to compile() method.
if (!input_reshape_table.empty()) {
const_cast<IE::CNNNetwork *>(&uu.net)->reshape(input_reshape_table);
}
}
// FIXME: It would be nice here to have an exact number of network's
@@ -1017,10 +1051,11 @@ struct InferROI: public cv::detail::KernelTag {
auto&& this_roi = ctx->inArg<cv::detail::OpaqueRef>(0).rref<cv::Rect>();
IE::Blob::Ptr this_blob = extractBlob(*ctx, 1);
req.SetBlob(*(ctx->uu.params.input_names.begin()),
IE::make_shared_blob(this_blob, toIE(this_roi)));
setBlob(req,
ctx->uu.params.kind,
*(ctx->uu.params.input_names.begin()),
IE::make_shared_blob(this_blob,
toIE(this_roi)));
// FIXME: Should it be done by kernel ?
// What about to do that in RequestPool ?
req.StartAsync();
@@ -1055,23 +1090,27 @@ struct InferList: public cv::detail::KernelTag {
GAPI_Assert(uu.params.input_names.size() == (in_metas.size() - 1u)
&& "Known input layers count doesn't match input meta count");
std::size_t idx = 1u;
for (auto &&input_name : uu.params.input_names) {
auto &&ii = uu.inputs.at(input_name);
const auto & mm = in_metas[idx++];
configureInputInfo(ii, mm);
if (uu.params.layer_names_to_reshape.find(input_name) !=
uu.params.layer_names_to_reshape.end()) {
configureInputReshapeByImage(ii, mm, input_reshape_table);
// NB: Configuring input precision and network reshape must be done
// only in the loadNetwork case.
if (uu.params.kind == cv::gapi::ie::detail::ParamDesc::Kind::Load) {
std::size_t idx = 1u;
for (auto &&input_name : uu.params.input_names) {
auto &&ii = uu.inputs.at(input_name);
const auto & mm = in_metas[idx++];
configureInputInfo(ii, mm);
if (uu.params.layer_names_to_reshape.find(input_name) !=
uu.params.layer_names_to_reshape.end()) {
configureInputReshapeByImage(ii, mm, input_reshape_table);
}
ii->getPreProcess().setResizeAlgorithm(IE::RESIZE_BILINEAR);
}
ii->getPreProcess().setResizeAlgorithm(IE::RESIZE_BILINEAR);
}
// FIXME: This isn't the best place to call reshape function.
// Correct solution would be to do this in compile() method of network,
// but now input meta isn't passed to compile() method.
if (!input_reshape_table.empty()) {
const_cast<IE::CNNNetwork *>(&uu.net)->reshape(input_reshape_table);
// FIXME: This isn't the best place to call reshape function.
// Correct solution would be to do this in compile() method of network,
// but now input meta isn't passed to compile() method.
if (!input_reshape_table.empty()) {
const_cast<IE::CNNNetwork *>(&uu.net)->reshape(input_reshape_table);
}
}
// roi-list version is much easier at the moment.
@@ -1117,7 +1156,10 @@ struct InferList: public cv::detail::KernelTag {
cv::gimpl::ie::RequestPool::Task {
[ctx, rc, this_blob](InferenceEngine::InferRequest &req) {
IE::Blob::Ptr roi_blob = IE::make_shared_blob(this_blob, toIE(rc));
req.SetBlob(ctx->uu.params.input_names[0u], roi_blob);
setBlob(req,
ctx->uu.params.kind,
ctx->uu.params.input_names[0u],
roi_blob);
req.StartAsync();
},
std::bind(callback, std::placeholders::_1, pos)
@@ -1191,19 +1233,23 @@ struct InferList2: public cv::detail::KernelTag {
&& "Non-array inputs are not supported");
if (op.k.inKinds[idx] == cv::detail::OpaqueKind::CV_RECT) {
// This is a cv::Rect -- configure the IE preprocessing
configureInputInfo(ii, mm_0);
if (uu.params.layer_names_to_reshape.find(input_name) !=
uu.params.layer_names_to_reshape.end()) {
configureInputReshapeByImage(ii, mm_0, input_reshape_table);
}
ii->getPreProcess().setResizeAlgorithm(IE::RESIZE_BILINEAR);
// NB: Configuring input precision and network reshape must be done
// only in the loadNetwork case.
if (uu.params.kind == cv::gapi::ie::detail::ParamDesc::Kind::Load) {
// This is a cv::Rect -- configure the IE preprocessing
configureInputInfo(ii, mm_0);
if (uu.params.layer_names_to_reshape.find(input_name) !=
uu.params.layer_names_to_reshape.end()) {
configureInputReshapeByImage(ii, mm_0, input_reshape_table);
}
ii->getPreProcess().setResizeAlgorithm(IE::RESIZE_BILINEAR);
// FIXME: This isn't the best place to call reshape function.
// Correct solution would be to do this in compile() method of network,
// but now input meta isn't passed to compile() method.
if (!input_reshape_table.empty()) {
const_cast<IE::CNNNetwork *>(&uu.net)->reshape(input_reshape_table);
// FIXME: This isn't the best place to call reshape function.
// Correct solution would be to do this in compile() method of network,
// but now input meta isn't passed to compile() method.
if (!input_reshape_table.empty()) {
const_cast<IE::CNNNetwork *>(&uu.net)->reshape(input_reshape_table);
}
}
} else {
// This is a cv::GMat (equals to: cv::Mat)
@@ -1268,8 +1314,10 @@ struct InferList2: public cv::detail::KernelTag {
GAPI_Assert(false &&
"Only Rect and Mat types are supported for infer list 2!");
}
req.SetBlob(ctx->uu.params.input_names[in_idx], this_blob);
setBlob(req,
ctx->uu.params.kind,
ctx->uu.params.input_names[in_idx],
this_blob);
}
req.StartAsync();
},

@@ -22,24 +22,6 @@ namespace IE = InferenceEngine;
namespace giewrap = cv::gimpl::ie::wrap;
using GIEParam = cv::gapi::ie::detail::ParamDesc;
IE::InputsDataMap giewrap::toInputsDataMap (const IE::ConstInputsDataMap& inputs) {
IE::InputsDataMap transformed;
auto convert = [](const std::pair<std::string, IE::InputInfo::CPtr>& p) {
return std::make_pair(p.first, std::const_pointer_cast<IE::InputInfo>(p.second));
};
std::transform(inputs.begin(), inputs.end(), std::inserter(transformed, transformed.end()), convert);
return transformed;
}
IE::OutputsDataMap giewrap::toOutputsDataMap (const IE::ConstOutputsDataMap& outputs) {
IE::OutputsDataMap transformed;
auto convert = [](const std::pair<std::string, IE::CDataPtr>& p) {
return std::make_pair(p.first, std::const_pointer_cast<IE::Data>(p.second));
};
std::transform(outputs.begin(), outputs.end(), std::inserter(transformed, transformed.end()), convert);
return transformed;
}
#if INF_ENGINE_RELEASE < 2020000000 // < 2020.1
// Load extensions (taken from DNN module)
std::vector<std::string> giewrap::getExtensions(const GIEParam& params) {

@@ -29,9 +29,6 @@ namespace wrap {
GAPI_EXPORTS std::vector<std::string> getExtensions(const GIEParam& params);
GAPI_EXPORTS IE::CNNNetwork readNetwork(const GIEParam& params);
IE::InputsDataMap toInputsDataMap (const IE::ConstInputsDataMap& inputs);
IE::OutputsDataMap toOutputsDataMap(const IE::ConstOutputsDataMap& outputs);
#if INF_ENGINE_RELEASE < 2019020000 // < 2019.R2
using Plugin = IE::InferencePlugin;
GAPI_EXPORTS IE::InferencePlugin getPlugin(const GIEParam& params);