Mirror of https://github.com/opencv/opencv.git (last synced 2025-06-07 09:25:45 +08:00).
Merge pull request #13359 from dkurt:dnn_keras_pad_concat
This commit is contained in:
commit
6fbf6f8bea
@ -77,6 +77,15 @@ CV__DNN_EXPERIMENTAL_NS_BEGIN
|
||||
static Ptr<Layer> create(const LayerParams ¶ms);
|
||||
};
|
||||
|
||||
/**
|
||||
* Constant layer produces the same data blob at an every forward pass.
|
||||
*/
|
||||
class CV_EXPORTS ConstLayer : public Layer
|
||||
{
|
||||
public:
|
||||
static Ptr<Layer> create(const LayerParams ¶ms);
|
||||
};
|
||||
|
||||
//! LSTM recurrent layer
|
||||
class CV_EXPORTS LSTMLayer : public Layer
|
||||
{
|
||||
|
@ -112,6 +112,7 @@ void initializeLayerFactory()
|
||||
CV_DNN_REGISTER_LAYER_CLASS(Dropout, BlankLayer);
|
||||
CV_DNN_REGISTER_LAYER_CLASS(Identity, BlankLayer);
|
||||
CV_DNN_REGISTER_LAYER_CLASS(Silence, BlankLayer);
|
||||
CV_DNN_REGISTER_LAYER_CLASS(Const, ConstLayer);
|
||||
|
||||
CV_DNN_REGISTER_LAYER_CLASS(Crop, CropLayer);
|
||||
CV_DNN_REGISTER_LAYER_CLASS(Eltwise, EltwiseLayer);
|
||||
|
68
modules/dnn/src/layers/const_layer.cpp
Normal file
68
modules/dnn/src/layers/const_layer.cpp
Normal file
@ -0,0 +1,68 @@
|
||||
// This file is part of OpenCV project.
|
||||
// It is subject to the license terms in the LICENSE file found in the top-level directory
|
||||
// of this distribution and at http://opencv.org/license.html.
|
||||
|
||||
// Copyright (C) 2018, Intel Corporation, all rights reserved.
|
||||
// Third party copyrights are property of their respective owners.
|
||||
|
||||
#include "../precomp.hpp"
|
||||
#include "layers_common.hpp"
|
||||
|
||||
#ifdef HAVE_OPENCL
|
||||
#include "opencl_kernels_dnn.hpp"
|
||||
#endif
|
||||
|
||||
namespace cv { namespace dnn {
|
||||
|
||||
// Implementation of ConstLayer: a source layer that emits its single stored
// blob, unchanged, on every forward pass. It consumes no inputs.
class ConstLayerImpl CV_FINAL : public ConstLayer
{
public:
    ConstLayerImpl(const LayerParams& params)
    {
        setParamsFrom(params);
        // The constant data must arrive as exactly one blob in the params.
        CV_Assert(blobs.size() == 1);
    }

    virtual bool getMemoryShapes(const std::vector<MatShape> &inputs,
                                 const int requiredOutputs,
                                 std::vector<MatShape> &outputs,
                                 std::vector<MatShape> &internals) const CV_OVERRIDE
    {
        // A constant layer has no inputs; its one output mirrors the blob's shape.
        CV_Assert(inputs.empty());
        outputs.assign(1, shape(blobs[0]));
        // No in-place processing is possible for a source layer.
        return false;
    }

#ifdef HAVE_OPENCL
    bool forward_ocl(InputArrayOfArrays inps, OutputArrayOfArrays outs, OutputArrayOfArrays internals)
    {
        std::vector<UMat> umatOutputs;
        outs.getUMatVector(umatOutputs);
        // Half-precision targets store FP16 data in 16-bit buffers (depth CV_16S
        // here), so the FP32 blob is converted on the fly; otherwise a plain copy.
        const bool isFp16 = (outs.depth() == CV_16S);
        if (isFp16)
            convertFp16(blobs[0], umatOutputs[0]);
        else
            blobs[0].copyTo(umatOutputs[0]);
        return true;
    }
#endif

    void forward(InputArrayOfArrays inputs_arr, OutputArrayOfArrays outputs_arr, OutputArrayOfArrays internals_arr) CV_OVERRIDE
    {
        CV_TRACE_FUNCTION();
        CV_TRACE_ARG_VALUE(name, "name", name.c_str());

        // Prefer the OpenCL path when the target runs on an OpenCL device.
        CV_OCL_RUN(IS_DNN_OPENCL_TARGET(preferableTarget),
                   forward_ocl(inputs_arr, outputs_arr, internals_arr))

        std::vector<Mat> matOutputs;
        outputs_arr.getMatVector(matOutputs);
        // CPU path: hand out a copy of the constant blob.
        blobs[0].copyTo(matOutputs[0]);
    }
};
|
||||
|
||||
// Factory entry point: builds the concrete ConstLayerImpl and hands back
// an owning Ptr through the public ConstLayer interface.
Ptr<Layer> ConstLayer::create(const LayerParams& params)
{
    Ptr<Layer> layer(new ConstLayerImpl(params));
    return layer;
}
|
||||
|
||||
}} // namespace cv::dnn
|
@ -1266,14 +1266,31 @@ void TFImporter::populateNet(Net dstNet)
|
||||
axis = toNCHW(axis);
|
||||
layerParams.set("axis", axis);
|
||||
|
||||
int id = dstNet.addLayer(name, "Concat", layerParams);
|
||||
layer_id[name] = id;
|
||||
|
||||
|
||||
// input(0) or input(n-1) is concat_dim
|
||||
int from = (type == "Concat" ? 1 : 0);
|
||||
int to = (type == "Concat" ? layer.input_size() : layer.input_size() - 1);
|
||||
|
||||
// input(0) or input(n-1) is concat_dim
|
||||
for (int ii = from; ii < to; ii++)
|
||||
{
|
||||
Pin inp = parsePin(layer.input(ii));
|
||||
if (layer_id.find(inp.name) == layer_id.end())
|
||||
{
|
||||
// There are constant inputs.
|
||||
LayerParams lp;
|
||||
lp.name = inp.name;
|
||||
lp.type = "Const";
|
||||
lp.blobs.resize(1);
|
||||
blobFromTensor(getConstBlob(layer, value_id, ii), lp.blobs.back());
|
||||
CV_Assert_N(!lp.blobs[0].empty(), lp.blobs[0].type() == CV_32F);
|
||||
|
||||
int constInpId = dstNet.addLayer(lp.name, lp.type, lp);
|
||||
layer_id[lp.name] = constInpId;
|
||||
}
|
||||
}
|
||||
|
||||
int id = dstNet.addLayer(name, "Concat", layerParams);
|
||||
layer_id[name] = id;
|
||||
|
||||
for (int ii = from; ii < to; ii++)
|
||||
{
|
||||
Pin inp = parsePin(layer.input(ii));
|
||||
|
@ -136,6 +136,7 @@ TEST_P(Test_TensorFlow_layers, padding)
|
||||
runTensorFlowNet("padding_same");
|
||||
runTensorFlowNet("padding_valid");
|
||||
runTensorFlowNet("spatial_padding");
|
||||
runTensorFlowNet("keras_pad_concat");
|
||||
}
|
||||
|
||||
TEST_P(Test_TensorFlow_layers, eltwise_add_mul)
|
||||
|
Loading…
Reference in New Issue
Block a user