dnn: Slice with variable input shapes

This commit is contained in:
Dmitry Kurtaev 2020-05-05 13:07:26 +03:00
parent 27ee6501eb
commit 8b13b85c5e
2 changed files with 65 additions and 27 deletions

View File

@ -165,18 +165,19 @@ public:
CV_Assert(inputs.size() == 1); CV_Assert(inputs.size() == 1);
const MatSize& inpShape = inputs[0].size; const MatSize& inpShape = inputs[0].size;
finalSliceRanges = sliceRanges;
if (sliceRanges.empty()) if (sliceRanges.empty())
{ {
// Divide input blob on equal parts by axis. // Divide input blob on equal parts by axis.
int outAxisSize = inpShape[axis] / outputs.size(); int outAxisSize = inpShape[axis] / outputs.size();
sliceRanges.resize(outputs.size(), finalSliceRanges.resize(outputs.size(),
std::vector<Range>(axis + 1, Range::all())); std::vector<Range>(axis + 1, Range::all()));
int prevSlice = 0; int prevSlice = 0;
for (int i = 0; i < outputs.size(); ++i) for (int i = 0; i < outputs.size(); ++i)
{ {
sliceRanges[i][axis].start = prevSlice; finalSliceRanges[i][axis].start = prevSlice;
sliceRanges[i][axis].end = sliceRanges[i][axis].start + outAxisSize; finalSliceRanges[i][axis].end = finalSliceRanges[i][axis].start + outAxisSize;
prevSlice = sliceRanges[i][axis].end; prevSlice = finalSliceRanges[i][axis].end;
} }
} }
else else
@ -184,16 +185,16 @@ public:
for (int i = 0; i < outputs.size(); ++i) for (int i = 0; i < outputs.size(); ++i)
{ {
CV_Assert(sliceRanges[i].size() <= inpShape.dims()); CV_Assert(finalSliceRanges[i].size() <= inpShape.dims());
// Fill the rest of ranges. // Fill the rest of ranges.
for (int j = sliceRanges[i].size(); j < inpShape.dims(); ++j) for (int j = finalSliceRanges[i].size(); j < inpShape.dims(); ++j)
{ {
sliceRanges[i].push_back(Range::all()); finalSliceRanges[i].push_back(Range::all());
} }
// Clamp. // Clamp.
for (int j = 0; j < sliceRanges[i].size(); ++j) for (int j = 0; j < finalSliceRanges[i].size(); ++j)
{ {
sliceRanges[i][j] = clamp(sliceRanges[i][j], inpShape[j]); finalSliceRanges[i][j] = clamp(finalSliceRanges[i][j], inpShape[j]);
} }
} }
} }
@ -234,8 +235,8 @@ public:
kernel.set(idx++, (int)(rows * cols)); kernel.set(idx++, (int)(rows * cols));
kernel.set(idx++, (int)inpMat.size[3]); kernel.set(idx++, (int)inpMat.size[3]);
kernel.set(idx++, (int)cols); kernel.set(idx++, (int)cols);
kernel.set(idx++, (int)sliceRanges[i][2].start); kernel.set(idx++, (int)finalSliceRanges[i][2].start);
kernel.set(idx++, (int)sliceRanges[i][3].start); kernel.set(idx++, (int)finalSliceRanges[i][3].start);
kernel.set(idx++, ocl::KernelArg::PtrWriteOnly(outputs[i])); kernel.set(idx++, ocl::KernelArg::PtrWriteOnly(outputs[i]));
bool ret = kernel.run(1, global, local, false); bool ret = kernel.run(1, global, local, false);
if (!ret) if (!ret)
@ -259,10 +260,10 @@ public:
outputs_arr.getMatVector(outputs); outputs_arr.getMatVector(outputs);
const Mat& inpMat = inputs[0]; const Mat& inpMat = inputs[0];
CV_Assert(outputs.size() == sliceRanges.size()); CV_Assert(outputs.size() == finalSliceRanges.size());
for (size_t i = 0; i < outputs.size(); i++) for (size_t i = 0; i < outputs.size(); i++)
{ {
inpMat(sliceRanges[i]).copyTo(outputs[i]); inpMat(finalSliceRanges[i]).copyTo(outputs[i]);
} }
} }
@ -270,11 +271,11 @@ public:
#if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2019R1) #if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2019R1)
virtual Ptr<BackendNode> initInfEngine(const std::vector<Ptr<BackendWrapper> >& inputs) CV_OVERRIDE virtual Ptr<BackendNode> initInfEngine(const std::vector<Ptr<BackendWrapper> >& inputs) CV_OVERRIDE
{ {
CV_Assert_N(sliceRanges.size() == 1, inputs.size() <= 2); CV_Assert_N(finalSliceRanges.size() == 1, inputs.size() <= 2);
std::vector<size_t> axes, offsets, dims; std::vector<size_t> axes, offsets, dims;
int from, to, step; int from, to, step;
int numDims = sliceRanges[0].size(); int numDims = finalSliceRanges[0].size();
if (preferableTarget == DNN_TARGET_MYRIAD) if (preferableTarget == DNN_TARGET_MYRIAD)
{ {
from = axis; from = axis;
@ -290,8 +291,8 @@ public:
for (int i = from; i != to; i += step) for (int i = from; i != to; i += step)
{ {
axes.push_back(i); axes.push_back(i);
offsets.push_back(sliceRanges[0][i].start); offsets.push_back(finalSliceRanges[0][i].start);
dims.push_back(sliceRanges[0][i].size()); dims.push_back(finalSliceRanges[0][i].size());
} }
InferenceEngine::Builder::Layer ieLayer(name); InferenceEngine::Builder::Layer ieLayer(name);
@ -307,7 +308,7 @@ public:
{ {
std::vector<size_t> outShape(numDims); std::vector<size_t> outShape(numDims);
for (int i = 0; i < numDims; ++i) for (int i = 0; i < numDims; ++i)
outShape[i] = sliceRanges[0][i].size(); outShape[i] = finalSliceRanges[0][i].size();
ieLayer.getInputPorts()[1].setParameter("type", "weights"); ieLayer.getInputPorts()[1].setParameter("type", "weights");
@ -329,13 +330,13 @@ public:
{ {
CV_Assert_N(nodes.size() <= 2); CV_Assert_N(nodes.size() <= 2);
auto& ieInpNode = nodes[0].dynamicCast<InfEngineNgraphNode>()->node; auto& ieInpNode = nodes[0].dynamicCast<InfEngineNgraphNode>()->node;
CV_Assert(sliceRanges[0].size() == ieInpNode->get_shape().size()); CV_Assert(finalSliceRanges[0].size() == ieInpNode->get_shape().size());
std::vector<int64_t> offsets, dims; std::vector<int64_t> offsets, dims;
for (int i = 0; i < sliceRanges[0].size(); ++i) for (int i = 0; i < finalSliceRanges[0].size(); ++i)
{ {
offsets.push_back(sliceRanges[0][i].start); offsets.push_back(finalSliceRanges[0][i].start);
dims.push_back(sliceRanges[0][i].end); dims.push_back(finalSliceRanges[0][i].end);
} }
auto lower_bounds = std::make_shared<ngraph::op::Constant>(ngraph::element::i64, auto lower_bounds = std::make_shared<ngraph::op::Constant>(ngraph::element::i64,
@ -352,6 +353,9 @@ public:
} }
#endif // HAVE_DNN_NGRAPH #endif // HAVE_DNN_NGRAPH
protected:
// The actual non-negative values determined from @p sliceRanges depends on input size.
std::vector<std::vector<Range> > finalSliceRanges;
}; };
class CropLayerImpl CV_FINAL : public SliceLayerImpl class CropLayerImpl CV_FINAL : public SliceLayerImpl
@ -415,18 +419,18 @@ public:
offset_final[i] = offset[i - start_axis]; offset_final[i] = offset[i - start_axis];
} }
sliceRanges.resize(1); finalSliceRanges.resize(1);
sliceRanges[0].resize(dims); finalSliceRanges[0].resize(dims);
for (int i = 0; i < start_axis; i++) for (int i = 0; i < start_axis; i++)
{ {
sliceRanges[0][i] = Range(0, inpBlob.size[i]); finalSliceRanges[0][i] = Range(0, inpBlob.size[i]);
} }
for (int i = start_axis; i < dims; i++) for (int i = start_axis; i < dims; i++)
{ {
if (offset_final[i] < 0 || offset_final[i] + inpSzBlob.size[i] > inpBlob.size[i]) if (offset_final[i] < 0 || offset_final[i] + inpSzBlob.size[i] > inpBlob.size[i])
CV_Error(Error::StsBadArg, "invalid crop parameters or blob sizes"); CV_Error(Error::StsBadArg, "invalid crop parameters or blob sizes");
sliceRanges[0][i] = Range(offset_final[i], offset_final[i] + inpSzBlob.size[i]); finalSliceRanges[0][i] = Range(offset_final[i], offset_final[i] + inpSzBlob.size[i]);
} }
} }

View File

@ -1746,4 +1746,38 @@ TEST_P(Layer_Test_Resize, change_input)
INSTANTIATE_TEST_CASE_P(/**/, Layer_Test_Resize, dnnBackendsAndTargets()); INSTANTIATE_TEST_CASE_P(/**/, Layer_Test_Resize, dnnBackendsAndTargets());
// Regression test: a full-range Slice layer must pass its input through
// unchanged even when the input blob's shape differs between forward() calls
// (variable input shapes). Parameterized over (backend, target) pairs.
typedef testing::TestWithParam<tuple<Backend, Target> > Layer_Test_Slice;
TEST_P(Layer_Test_Slice, variable_input_shape)
{
    const int backend = get<0>(GetParam());
    const int target = get<1>(GetParam());

    // Identity slice: begin at 0 and run to the end (-1) along all four axes.
    int begin[] = {0, 0, 0, 0};
    int end[] = {-1, -1, -1, -1};

    LayerParams lp;
    lp.type = "Slice";
    lp.name = "testLayer";
    lp.set("begin", DictValue::arrayInt<int*>(&begin[0], 4));
    lp.set("end", DictValue::arrayInt<int*>(&end[0], 4));

    Net net;
    net.addLayerToPrev(lp.name, lp.type, lp);

    // Forward twice with differently sized inputs to exercise shape
    // re-inference inside the Slice layer.
    for (int iter = 0; iter < 2; ++iter)
    {
        Mat img(4 + iter, 5 + iter, CV_8UC1);
        randu(img, 0, 255);
        Mat inp = blobFromImage(img);

        net.setInput(inp);
        net.setPreferableBackend(backend);
        net.setPreferableTarget(target);

        // An all-axes identity slice must reproduce the input exactly.
        normAssert(net.forward(), inp);
    }
}
INSTANTIATE_TEST_CASE_P(/**/, Layer_Test_Slice, dnnBackendsAndTargets());
}} // namespace }} // namespace