mirror of https://github.com/opencv/opencv.git
Merge pull request #9649 from dkurt:dnn_reshape_transpose

Commit a0d3d11470
@@ -132,6 +132,7 @@ public:
         for (size_t i = 0; i < inputs.size(); i++)
         {
+            CV_Assert(inputs[i].size() == 4);
             CV_Assert(inputs[i][2] == shapeBefore[2] && inputs[i][3] == shapeBefore[3]);
             CV_Assert(total(inputs[i]) == total(shapeAfter));
             outputs.push_back(shapeAfter);
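
The added assert guards the two checks that follow it: indexing inputs[i][2] and inputs[i][3] is only meaningful once the shape is known to be 4-D. A minimal standalone sketch of the same guard (matchesSpatialSize is a hypothetical name, not OpenCV code):

#include <cassert>
#include <vector>

typedef std::vector<int> MatShape;  // cv::dnn::MatShape is likewise a vector of ints

// Hypothetical helper mirroring the checks above: refuse non-4-D shapes
// before touching dims 2 and 3.
static bool matchesSpatialSize(const MatShape& in, const MatShape& before)
{
    assert(in.size() == 4);  // the newly added guard
    return in[2] == before[2] && in[3] == before[3];
}

int main()
{
    int dims[] = {1, 64, 7, 7};
    MatShape s(dims, dims + 4);
    assert(matchesSpatialSize(s, s));
    return 0;
}
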
@@ -146,13 +146,11 @@ static void computeShapeByReshapeMask(const MatShape &srcShape,
 class ReshapeLayerImpl : public ReshapeLayer
 {
 public:
-    ReshapeLayerImpl(const LayerParams& params):
-        performReordering(false)
+    ReshapeLayerImpl(const LayerParams& params)
     {
         setParamsFrom(params);
         int axis = params.get<int>("axis", 0);
         int numAxes = params.get<int>("num_axes", -1);
-        enableReordering = params.get<bool>("reorder_dims", false);
         CV_Assert(numAxes >= -1);
         newShapeRange = (numAxes == -1) ? Range(axis, INT_MAX) : Range(axis, axis + numAxes);
 
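
For context, axis and num_axes select which slice of the input shape the layer rewrites, and num_axes == -1 (the default) means "from axis through the last dimension". A standalone sketch of that computation, assuming only the constructor logic shown above:

#include <cassert>
#include <climits>
#include <utility>

// Sketch of the range the ctor builds: [axis, axis + num_axes), with
// num_axes == -1 standing for "through the last axis".
static std::pair<int, int> newShapeRange(int axis, int numAxes)
{
    assert(numAxes >= -1);  // mirrors the CV_Assert above
    return (numAxes == -1) ? std::make_pair(axis, INT_MAX)
                           : std::make_pair(axis, axis + numAxes);
}

int main()
{
    std::pair<int, int> whole = newShapeRange(0, -1);  // rewrite the full shape
    std::pair<int, int> tail  = newShapeRange(1, 2);   // rewrite dims 1 and 2 only
    assert(whole.second == INT_MAX && tail.second == 3);
    return 0;
}
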
@@ -184,25 +182,6 @@ public:
         return true;
     }
 
-    void finalize(const std::vector<Mat*> &inputs, std::vector<Mat> &outputs)
-    {
-        CV_Assert(inputs.size());
-        CV_Assert(outputs.size());
-        Mat srcBlob = *inputs[0];
-        int dims = srcBlob.dims;
-        MatShape inputShape = shape(srcBlob), outShape = shape(outputs[0]);
-
-        // input.total() == output.total(). So if reordering is require,
-        // one of the sizes will be are not equal.
-        // Example where reordering is require: from 1x128x4x4 to 1x2048
-        // Example where reordering is NOT require: from 1x1024x1x1 to 1x1024.
-        bool reorderingRequire = false;
-        const int minDims = min(dims, (int)outShape.size());
-        for (int i = 0; !reorderingRequire && i < minDims; ++i)
-            reorderingRequire = inputShape[i] != outShape[i];
-        performReordering = enableReordering && reorderingRequire;
-    }
-
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
     {
         CV_TRACE_FUNCTION();
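
The deleted finalize() decided whether data had to be physically reordered by comparing the shared leading dims of input and output (the totals are already equal). A plain-C++ sketch of that removed decision, using the two examples from the deleted comment:

#include <cassert>
#include <vector>

// Sketch of the removed logic: since input.total() == output.total(),
// any mismatch on a shared leading dim means the layout must change.
static bool needsReordering(const std::vector<int>& in, const std::vector<int>& out)
{
    size_t minDims = in.size() < out.size() ? in.size() : out.size();
    for (size_t i = 0; i < minDims; ++i)
        if (in[i] != out[i])
            return true;
    return false;
}

int main()
{
    int a[] = {1, 128, 4, 4}, b[] = {1, 2048};   // 128 != 2048: reordering required
    int c[] = {1, 1024, 1, 1}, d[] = {1, 1024};  // dims 0,1 match: no reordering
    assert(needsReordering(std::vector<int>(a, a + 4), std::vector<int>(b, b + 2)));
    assert(!needsReordering(std::vector<int>(c, c + 4), std::vector<int>(d, d + 2)));
    return 0;
}
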
@@ -211,43 +190,10 @@ public:
         for (size_t i = 0; i < inputs.size(); i++)
         {
             Mat srcBlob = *inputs[i];
-            MatShape inputShape = shape(srcBlob), outShape = shape(outputs[i]);
-
-            if (performReordering)
-            {
-                float *dstData = internals[i].ptr<float>();
-                const float *srcData = srcBlob.ptr<float>();
-
-                int num = inputShape[0], channels = inputShape[1], height = inputShape[2], width = inputShape[3];
-                int total = num*channels*height*width;
-                for(int i_n = 0; i_n < num; i_n++) {
-                    for(int i_c = 0; i_c < channels; i_c++) {
-                        for(int i_h = 0; i_h < height; i_h++) {
-                            for(int i_w = 0; i_w < width; i_w++) {
-                                int src_i = channels*height*width*i_n + height*width*i_c + width*i_h + i_w;
-                                int dst_i = channels*height*width*i_n + i_c + channels*width*i_h + channels*i_w;
-
-                                CV_Assert(dst_i < total);
-                                CV_Assert(src_i < total);
-
-                                dstData[dst_i] = srcData[src_i];
-                            }
-                        }
-                    }
-                }
-                internals[i].copyTo(outputs[i]);
-            }
-            else
-            {
-                if (outputs[i].data != srcBlob.data)
-                    srcBlob.reshape(1, outShape).copyTo(outputs[i]);
-            }
+            if (outputs[i].data != srcBlob.data)
+                srcBlob.reshape(1, shape(outputs[i])).copyTo(outputs[i]);
         }
     }
-
-private:
-    std::vector<std::vector<int> > outShapes;
-    bool enableReordering, performReordering;
 };
 
 Ptr<ReshapeLayer> ReshapeLayer::create(const LayerParams& params)
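
What the removed branch actually computed was an NCHW -> NHWC copy: src_i walks the source in NCHW order and dst_i is the same element's NHWC offset. A standalone sketch (not OpenCV code) equivalent to the deleted loop nest:

#include <cassert>

// NCHW -> NHWC copy, equivalent to the deleted src_i/dst_i arithmetic:
//   src_i = ((n*C + c)*H + h)*W + w   (NCHW)
//   dst_i = ((n*H + h)*W + w)*C + c   (NHWC)
static void reorderNCHWtoNHWC(const float* src, float* dst,
                              int num, int channels, int height, int width)
{
    for (int n = 0; n < num; ++n)
        for (int c = 0; c < channels; ++c)
            for (int h = 0; h < height; ++h)
                for (int w = 0; w < width; ++w)
                {
                    int src_i = ((n * channels + c) * height + h) * width + w;
                    int dst_i = ((n * height + h) * width + w) * channels + c;
                    dst[dst_i] = src[src_i];
                }
}

int main()
{
    // 1x2x1x2 example: NCHW [0 1 | 2 3] becomes NHWC [0 2 1 3].
    float src[] = {0.f, 1.f, 2.f, 3.f}, dst[4];
    reorderNCHWtoNHWC(src, dst, 1, 2, 1, 2);
    assert(dst[0] == 0.f && dst[1] == 2.f && dst[2] == 1.f && dst[3] == 3.f);
    return 0;
}

After this change, Reshape never moves data; it only reinterprets the shape via reshape() plus copyTo(), and the TensorFlow importer stops requesting reordering, as the next hunk shows.
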
@@ -771,7 +771,6 @@ void TFImporter::populateNet(Net dstNet)
         else if (type == "Reshape")
         {
             layerParams.set("dim", parseDims(getConstBlob(layer, value_id, 1)));
-            layerParams.set("reorder_dims", true);
 
             int id = dstNet.addLayer(name, "Reshape", layerParams);
             layer_id[name] = id;
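
A hedged usage sketch of the same parameters from the user side (assuming the OpenCV 3.3-era cv::dnn API; only "dim" is needed now that "reorder_dims" is gone):

#include <opencv2/dnn.hpp>

using namespace cv;
using namespace cv::dnn;

int main()
{
    // Flatten 1x128x4x4 to 1x2048 with a shape mask; -1 is inferred.
    LayerParams params;
    int mask[] = {-1, 2048};
    params.set("dim", DictValue::arrayInt<int*>(mask, 2));
    Ptr<ReshapeLayer> reshape = ReshapeLayer::create(params);
    return reshape.empty() ? 1 : 0;
}
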
@@ -166,13 +166,12 @@ TEST(Layer_Test_MVN, Accuracy)
 }
 
 void testReshape(const MatShape& inputShape, const MatShape& targetShape,
-                 int axis = 0, int num_axes = -1, bool reorder_dims = false,
+                 int axis = 0, int num_axes = -1,
                  MatShape mask = MatShape())
 {
     LayerParams params;
     params.set("axis", axis);
     params.set("num_axes", num_axes);
-    params.set("reorder_dims", reorder_dims);
     if (!mask.empty())
     {
         params.set("dim", DictValue::arrayInt<int*>(&mask[0], mask.size()));
@@ -201,7 +200,7 @@ TEST(Layer_Test_Reshape, Accuracy)
         int inp[] = {1, 128, 4, 4};
         int out[] = {1, 2048};
        int mask[] = {-1, 2048};
-        testReshape(MatShape(inp, inp + 4), MatShape(out, out + 2), 0, -1, true,
+        testReshape(MatShape(inp, inp + 4), MatShape(out, out + 2), 0, -1,
                     MatShape(mask, mask + 2));
     }
 }
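
For the test above, the shape mask {-1, 2048} works out as follows (a worked sketch of the -1 inference performed by computeShapeByReshapeMask, assuming the usual "solve -1 so totals match" semantics):

#include <cstdio>

int main()
{
    int total = 1 * 128 * 4 * 4;  // input 1x128x4x4 -> 2048 elements
    int inferred = total / 2048;  // -1 is solved as 2048/2048 = 1
    std::printf("output: %dx%d\n", inferred, 2048);  // 1x2048, the expected out[]
    return 0;
}
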
@@ -143,6 +143,11 @@ TEST(Test_TensorFlow, defun)
     runTensorFlowNet("defun_dropout");
 }
 
+TEST(Test_TensorFlow, reshape)
+{
+    runTensorFlowNet("shift_reshape_no_reorder");
+}
+
 TEST(Test_TensorFlow, fp16)
 {
     const float l1 = 1e-3;