Merge pull request #20156 from smirnov-alexey:as/gapi_remote_infer

G-API: Support remote inference

* Extend MediaFrame to be able to extract additional info besides access

* Add API for remote inference

* Add default implementation for blobParams()

* Add default implementation for blobParams()

* Address review comments

* Fix any_cast usage

* Add comment on the default blobParams()

* Address review comments

* Add missing rctx

* Minor fix

* Fix indentation and comment

* Address review comments

* Add documentation
Authored by Alexey Smirnov on 2021-06-26 00:09:33 +03:00; committed by GitHub
parent f88fdf6a1b
commit c95a56450d
3 changed files with 100 additions and 10 deletions


@@ -74,7 +74,11 @@ struct ParamDesc {
std::map<std::string, std::vector<std::size_t>> reshape_table;
std::unordered_set<std::string> layer_names_to_reshape;
// NB: Number of asynchronous infer requests
size_t nireq;
// NB: An optional config to setup RemoteContext for IE
cv::util::any context_config;
};
} // namespace detail
@@ -115,7 +119,8 @@ public:
, {}
, {}
, {}
, 1u} {
, 1u
, {}} {
};
/** @overload
@@ -135,7 +140,8 @@ public:
, {}
, {}
, {}
, 1u} {
, 1u
, {}} {
};
/** @brief Specifies sequence of network input layers names for inference.
@@ -217,6 +223,30 @@ public:
return *this;
}
/** @brief Specifies configuration for RemoteContext in InferenceEngine.
When a RemoteContext is configured, the backend imports the network using that context.
It also expects the incoming cv::MediaFrame objects to be backed by remote memory, so that blobs can be created from them via the context.
@param ctx_cfg cv::util::any value which holds InferenceEngine::ParamMap.
@return reference to this parameter structure.
*/
Params& cfgContextParams(const cv::util::any& ctx_cfg) {
desc.context_config = ctx_cfg;
return *this;
}
/** @overload
Function with an rvalue parameter.
@param ctx_cfg cv::util::any value which holds InferenceEngine::ParamMap.
@return reference to this parameter structure.
*/
Params& cfgContextParams(cv::util::any&& ctx_cfg) {
desc.context_config = std::move(ctx_cfg);
return *this;
}
/** @brief Specifies number of asynchronous inference requests.
@param nireq Number of asynchronous inference requests.
@@ -318,7 +348,10 @@ public:
const std::string &model,
const std::string &weights,
const std::string &device)
: desc{ model, weights, device, {}, {}, {}, 0u, 0u, detail::ParamDesc::Kind::Load, true, {}, {}, {}, 1u}, m_tag(tag) {
: desc{ model, weights, device, {}, {}, {}, 0u, 0u,
detail::ParamDesc::Kind::Load, true, {}, {}, {}, 1u,
{}},
m_tag(tag) {
};
/** @overload
@@ -333,7 +366,10 @@ public:
Params(const std::string &tag,
const std::string &model,
const std::string &device)
: desc{ model, {}, device, {}, {}, {}, 0u, 0u, detail::ParamDesc::Kind::Import, true, {}, {}, {}, 1u}, m_tag(tag) {
: desc{ model, {}, device, {}, {}, {}, 0u, 0u,
detail::ParamDesc::Kind::Import, true, {}, {}, {}, 1u,
{}},
m_tag(tag) {
};
/** @see ie::Params::pluginConfig. */
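
For context, here is a minimal usage sketch (not part of the patch) of the new cfgContextParams() API. The network tag, model files, device, and the ParamMap contents are hypothetical placeholders; real keys are plugin-specific (for example, the GPU plugin's remote-context parameters):

#include <opencv2/gapi/infer.hpp>
#include <opencv2/gapi/infer/ie.hpp>
#include <inference_engine.hpp>

G_API_NET(FaceDetector, <cv::GMat(cv::GMat)>, "example-face-detector"); // hypothetical network

static cv::gapi::GNetPackage remoteNetworks() {
    // Plugin-specific remote-context parameters; the key and value below are placeholders.
    InferenceEngine::ParamMap ctx_cfg = {
        {"CONTEXT_TYPE", std::string("VA_SHARED")}
    };
    auto det = cv::gapi::ie::Params<FaceDetector>{"face.xml", "face.bin", "GPU"}
                   .cfgContextParams(cv::util::any{ctx_cfg}) // enables the RemoteContext path
                   .cfgNumRequests(2u);
    return cv::gapi::networks(det);
}

Passing the resulting package via cv::compile_args() makes the IE backend create a RemoteContext from ctx_cfg and import/load the network through it.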


@@ -222,8 +222,17 @@ struct IEUnit {
IE::ExecutableNetwork this_network;
cv::gimpl::ie::wrap::Plugin this_plugin;
InferenceEngine::RemoteContext::Ptr rctx = nullptr;
explicit IEUnit(const cv::gapi::ie::detail::ParamDesc &pp)
: params(pp) {
InferenceEngine::ParamMap* ctx_params =
cv::util::any_cast<InferenceEngine::ParamMap>(&params.context_config);
if (ctx_params != nullptr) {
auto ie_core = cv::gimpl::ie::wrap::getCore();
rctx = ie_core.CreateContext(params.device_id, *ctx_params);
}
if (params.kind == cv::gapi::ie::detail::ParamDesc::Kind::Load) {
net = cv::gimpl::ie::wrap::readNetwork(params);
inputs = net.getInputsInfo();
@@ -231,7 +240,7 @@ struct IEUnit {
} else if (params.kind == cv::gapi::ie::detail::ParamDesc::Kind::Import) {
this_plugin = cv::gimpl::ie::wrap::getPlugin(params);
this_plugin.SetConfig(params.config);
this_network = cv::gimpl::ie::wrap::importNetwork(this_plugin, params);
this_network = cv::gimpl::ie::wrap::importNetwork(this_plugin, params, rctx);
// FIXME: ICNNetwork returns InputsDataMap/OutputsDataMap,
// but ExecutableNetwork returns ConstInputsDataMap/ConstOutputsDataMap
inputs = cv::gimpl::ie::wrap::toInputsDataMap(this_network.GetInputsInfo());
@@ -279,7 +288,8 @@ struct IEUnit {
// for loadNetwork they can be obtained by using readNetwork
non_const_this->this_plugin = cv::gimpl::ie::wrap::getPlugin(params);
non_const_this->this_plugin.SetConfig(params.config);
non_const_this->this_network = cv::gimpl::ie::wrap::loadNetwork(non_const_this->this_plugin, net, params);
non_const_this->this_network = cv::gimpl::ie::wrap::loadNetwork(non_const_this->this_plugin,
net, params, rctx);
}
return {params, this_plugin, this_network};
@@ -481,7 +491,32 @@ using GConstGIEModel = ade::ConstTypedGraph
, IECallable
>;
inline IE::Blob::Ptr extractRemoteBlob(IECallContext& ctx, std::size_t i) {
GAPI_Assert(ctx.inShape(i) == cv::GShape::GFRAME &&
"Remote blob is supported for MediaFrame only");
cv::util::any any_blob_params = ctx.inFrame(i).blobParams();
auto ie_core = cv::gimpl::ie::wrap::getCore();
using ParamType = std::pair<InferenceEngine::TensorDesc,
InferenceEngine::ParamMap>;
ParamType* blob_params = cv::util::any_cast<ParamType>(&any_blob_params);
if (blob_params == nullptr) {
GAPI_Assert(false && "Incorrect type of blobParams: "
"expected std::pair<InferenceEngine::TensorDesc,"
"InferenceEngine::ParamMap>");
}
return ctx.uu.rctx->CreateBlob(blob_params->first,
blob_params->second);
}
inline IE::Blob::Ptr extractBlob(IECallContext& ctx, std::size_t i) {
if (ctx.uu.rctx != nullptr) {
return extractRemoteBlob(ctx, i);
}
switch (ctx.inShape(i)) {
case cv::GShape::GFRAME: {
const auto& frame = ctx.inFrame(i);
@@ -1060,6 +1095,7 @@ struct InferList: public cv::detail::KernelTag {
}
IE::Blob::Ptr this_blob = extractBlob(*ctx, 1);
std::vector<std::vector<int>> cached_dims(ctx->uu.params.num_out);
for (auto i : ade::util::iota(ctx->uu.params.num_out)) {
const IE::DataPtr& ie_out = ctx->uu.outputs.at(ctx->uu.params.output_names[i]);
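
For reference, a hypothetical sketch (not part of this patch) of a remote-capable cv::MediaFrame adapter: extractRemoteBlob() above expects blobParams() to return a std::pair of InferenceEngine::TensorDesc and InferenceEngine::ParamMap. The frame format, dimensions, and the ParamMap key below are placeholders:

#include <stdexcept>
#include <utility>
#include <opencv2/gapi/media.hpp>
#include <inference_engine.hpp>

class RemoteBGRAdapter final : public cv::MediaFrame::IAdapter {
public:
    explicit RemoteBGRAdapter(void* handle) : m_handle(handle) {}

    cv::GFrameDesc meta() const override {
        return cv::GFrameDesc{cv::MediaFormat::BGR, cv::Size{1920, 1080}};
    }

    cv::MediaFrame::View access(cv::MediaFrame::Access) override {
        // The data lives in device memory; host mapping is not provided in this sketch.
        throw std::logic_error("Host access is not implemented for this remote frame");
    }

    cv::util::any blobParams() const override {
        // Describe the device-side surface and how the plugin can wrap it into a remote blob.
        InferenceEngine::TensorDesc desc(InferenceEngine::Precision::U8,
                                         {1, 3, 1080, 1920},
                                         InferenceEngine::Layout::NHWC);
        InferenceEngine::ParamMap params = {
            {"SHARED_MEM_HANDLE", m_handle} // placeholder key; real keys are plugin-specific
        };
        return std::make_pair(desc, params);
    }

private:
    void* m_handle = nullptr; // hypothetical handle to device-side memory
};

A frame constructed via cv::MediaFrame::Create<RemoteBGRAdapter>(handle) can then feed the remote path: the backend forwards the pair to RemoteContext::CreateBlob().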


@@ -13,6 +13,7 @@
#include <vector>
#include <string>
#include <fstream>
#include "opencv2/gapi/infer/ie.hpp"
@@ -50,12 +51,29 @@ GAPI_EXPORTS IE::Core getCore();
GAPI_EXPORTS IE::Core getPlugin(const GIEParam& params);
GAPI_EXPORTS inline IE::ExecutableNetwork loadNetwork( IE::Core& core,
const IE::CNNNetwork& net,
const GIEParam& params) {
return core.LoadNetwork(net, params.device_id);
const GIEParam& params,
IE::RemoteContext::Ptr rctx = nullptr) {
if (rctx != nullptr) {
return core.LoadNetwork(net, rctx);
} else {
return core.LoadNetwork(net, params.device_id);
}
}
GAPI_EXPORTS inline IE::ExecutableNetwork importNetwork( IE::Core& core,
const GIEParam& param) {
return core.ImportNetwork(param.model_path, param.device_id, {});
const GIEParam& params,
IE::RemoteContext::Ptr rctx = nullptr) {
if (rctx != nullptr) {
std::filebuf blobFile;
if (!blobFile.open(params.model_path, std::ios::in | std::ios::binary))
{
blobFile.close();
throw std::runtime_error("Could not open file");
}
std::istream graphBlob(&blobFile);
return core.ImportNetwork(graphBlob, rctx);
} else {
return core.ImportNetwork(params.model_path, params.device_id, {});
}
}
#endif // INF_ENGINE_RELEASE < 2019020000
}}}}
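
Finally, a hypothetical end-to-end sketch (not from the patch) showing how the pieces fit together: the ParamMap passed via cfgContextParams() reaches IEUnit, which creates a RemoteContext and routes network loading/import and input blob extraction through it. The network name, model files, device, and context contents below are assumptions:

#include <opencv2/gapi.hpp>
#include <opencv2/gapi/gframe.hpp>
#include <opencv2/gapi/gstreaming.hpp>
#include <opencv2/gapi/infer.hpp>
#include <opencv2/gapi/infer/ie.hpp>
#include <inference_engine.hpp>

G_API_NET(Classifier, <cv::GMat(cv::GMat)>, "example-classifier");

int main() {
    // Graph: inference directly on media frames (cv::GFrame input).
    cv::GFrame in;
    cv::GMat out = cv::gapi::infer<Classifier>(in);
    cv::GComputation comp(cv::GIn(in), cv::GOut(out));

    // Plugin-specific remote-context configuration (left empty here as a placeholder).
    InferenceEngine::ParamMap ctx_cfg;

    auto net = cv::gapi::ie::Params<Classifier>{"model.xml", "model.bin", "GPU"}
                   .cfgContextParams(cv::util::any{ctx_cfg});

    // The streaming source (not shown) must emit cv::MediaFrame objects whose
    // blobParams() describe memory compatible with the configured RemoteContext.
    auto pipeline = comp.compileStreaming(cv::compile_args(cv::gapi::networks(net)));
    (void)pipeline;
    return 0;
}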