Merge pull request #17284 from dkurt:dnn_bn_fusion
Commit bcf96d637e (opencv/opencv)

The change keeps the original BatchNorm weights and bias in the layer's blobs so that finalize() can restore them when the network is reinitialized (for example after forwarding an input with a different batch size), and adds a regression test for BatchNorm fusion.
@@ -94,6 +94,15 @@ public:
            dstWeightsData[i] = w;
            dstBiasData[i] = (hasBias ? biasData[i] : 0.0f) - w * meanData[i] * varMeanScale;
        }
        // We will use blobs to store the original weights and bias so that they can be restored in case of reinitialization.
        weights_.copyTo(blobs[0].reshape(1, 1));
        bias_.copyTo(blobs[1].reshape(1, 1));
    }

    virtual void finalize(InputArrayOfArrays, OutputArrayOfArrays) CV_OVERRIDE
    {
        blobs[0].reshape(1, 1).copyTo(weights_);
        blobs[1].reshape(1, 1).copyTo(bias_);
    }

    void getScaleShift(Mat& scale, Mat& shift) const CV_OVERRIDE
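For context, the values copied into blobs[0] and blobs[1] above are the BatchNorm statistics already folded into a per-channel affine transform. Below is a minimal standalone sketch of that arithmetic, assuming w is the usual 1/sqrt(var + eps) scale computed just above this hunk and taking varMeanScale as 1; the gamma/beta/eps values are made up for illustration and this is not OpenCV code.

// Hypothetical standalone sketch: fold BatchNorm statistics into the
// per-channel scale/shift pair that weights_ and bias_ hold, then check
// the result against the textbook BatchNorm formula.
#include <cmath>
#include <cstdio>

int main()
{
    // Assumed per-channel parameters: gamma (scale), beta (shift),
    // running mean, running variance and epsilon.
    const float gamma = 0.5f, beta = 0.1f, mean = 0.2f, var = 0.04f, eps = 1e-5f;

    const float w = gamma / std::sqrt(var + eps); // folded scale (what weights_ stores)
    const float b = beta - w * mean;              // folded shift (what bias_ stores)

    const float x = 0.7f;
    const float y_folded    = w * x + b;
    const float y_batchnorm = gamma * (x - mean) / std::sqrt(var + eps) + beta;
    std::printf("folded = %f, batchnorm = %f\n", y_folded, y_batchnorm); // identical
    return 0;
}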
@@ -1780,4 +1780,61 @@ TEST_P(Layer_Test_Slice, variable_input_shape)

INSTANTIATE_TEST_CASE_P(/**/, Layer_Test_Slice, dnnBackendsAndTargets());

typedef testing::TestWithParam<tuple<Backend, Target> > Layer_Test_BatchNorm;
TEST_P(Layer_Test_BatchNorm, fusion)
{
    // This test reinitializes the network by forwarding an input with a different batch size.
    // We check that the BatchNorm layer weights are restored after fusion.
    int backendId = get<0>(GetParam());
    int targetId = get<1>(GetParam());
    const int ch = 4;

    Mat mean(1, ch, CV_32F), var(1, ch, CV_32F), weights(1, ch, CV_32F);
    randu(mean, 0, 1);
    randu(var, 0, 1);
    randu(weights, 0, 1);

    Net net;
    {
        LayerParams lp;
        lp.type = "BatchNorm";
        lp.name = "bn";
        lp.set("has_weight", false);
        lp.set("has_bias", false);
        lp.blobs.push_back(mean);
        lp.blobs.push_back(var);
        net.addLayerToPrev(lp.name, lp.type, lp);
    }
    {
        LayerParams lp;
        lp.type = "Scale";
        lp.name = "scale";
        lp.set("has_bias", false);
        lp.blobs.push_back(weights);
        net.addLayerToPrev(lp.name, lp.type, lp);
    }

    Mat inp(4, 5, CV_32FC(ch));
    randu(inp, 0, 1);

    net.setPreferableBackend(backendId);
    net.setPreferableTarget(targetId);

    net.setInput(blobFromImage(inp));
    Mat ref = net.forward();

    net.setInput(blobFromImages(std::vector<Mat>(2, inp)));
    Mat out = net.forward();

    for (int i = 0; i < 2; ++i)
    {
        std::vector<Range> ranges(4, Range::all());
        ranges[0].start = i;
        ranges[0].end = i + 1;
        normAssert(out(ranges), ref);
    }
}

INSTANTIATE_TEST_CASE_P(/**/, Layer_Test_BatchNorm, dnnBackendsAndTargets());

}} // namespace
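A rough sketch of why the restore path matters in this test (the fusion code itself is not part of this diff, so the exact mechanism is an assumption): when the following Scale layer is fused into BatchNorm, its per-channel weights $s_c$ are presumably folded into the affine transform exposed via getScaleShift,

$$ y_c = s_c \left( w_c x_c + b_c \right) = (s_c w_c)\, x_c + (s_c b_c), $$

overwriting the layer's working weights_ and bias_. When the second forward with batch size 2 rebuilds the network, finalize() copies the pristine values back from blobs[0] and blobs[1], so the fusion can be applied again from scratch; the loop then checks with normAssert that each sample of the batched output matches the single-image reference.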