Mirror of https://github.com/opencv/opencv.git
Merge pull request #11893 from dkurt:fix_11884
Commit c0d0cf5e74
@@ -2075,7 +2075,8 @@ Mat Net::forward(const String& outputName)
     if (layerName.empty())
         layerName = getLayerNames().back();
 
-    impl->setUpNet();
+    std::vector<LayerPin> pins(1, impl->getPinByAlias(layerName));
+    impl->setUpNet(pins);
     impl->forwardToLayer(impl->getLayerData(layerName));
 
     return impl->getBlob(layerName);
@@ -2085,13 +2086,13 @@ void Net::forward(OutputArrayOfArrays outputBlobs, const String& outputName)
 {
     CV_TRACE_FUNCTION();
 
-    impl->setUpNet();
-
     String layerName = outputName;
 
     if (layerName.empty())
         layerName = getLayerNames().back();
 
+    std::vector<LayerPin> pins(1, impl->getPinByAlias(layerName));
+    impl->setUpNet(pins);
     impl->forwardToLayer(impl->getLayerData(layerName));
 
     LayerPin pin = impl->getPinByAlias(layerName);
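Both Net::forward overloads above now resolve the requested output layer name first and pass its pin to setUpNet(), instead of setting the network up before the name is known. Setting the network up with the requested pin as an output keeps layer fusion, such as folding a ReLU into the preceding convolution, from being applied across that output, which is what the new regression test below verifies. The following caller-side sketch illustrates the affected scenario; it is not part of the patch, and the model files and the layer name "conv1" are hypothetical placeholders.

#include <opencv2/dnn.hpp>
#include <opencv2/imgcodecs.hpp>

int main()
{
    // Hypothetical model files; any model with a convolution followed by a ReLU would do.
    cv::dnn::Net net = cv::dnn::readNetFromCaffe("model.prototxt", "model.caffemodel");

    cv::Mat img = cv::imread("input.jpg");
    cv::Mat blob = cv::dnn::blobFromImage(img, 1.0, cv::Size(224, 224));
    net.setInput(blob);

    // Request the convolution's output by name ("conv1" is a placeholder).
    // With this patch the network is set up with that pin as an output, so the
    // following ReLU is not fused into the convolution and the returned blob is
    // the pre-activation convolution result.
    cv::Mat conv1 = net.forward("conv1");
    return 0;
}

Before this change, setUpNet() ran before the requested output was known, so the blob returned for an intermediate layer could already include a fused activation.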
@@ -1240,4 +1240,36 @@ INSTANTIATE_TEST_CASE_P(/**/, Layer_Test_ShuffleChannel, Combine(
     /*group*/ Values(1, 2, 3, 6)
 ));
 
+// Check if relu is not fused to convolution if we requested its output
+TEST(Layer_Test_Convolution, relu_fusion)
+{
+    Net net;
+    {
+        LayerParams lp;
+        lp.set("kernel_size", 1);
+        lp.set("num_output", 1);
+        lp.set("bias_term", false);
+        lp.type = "Convolution";
+        lp.name = "testConv";
+
+        int weightsShape[] = {1, 1, 1, 1};
+        Mat weights(4, &weightsShape[0], CV_32F, Scalar(1));
+        lp.blobs.push_back(weights);
+        net.addLayerToPrev(lp.name, lp.type, lp);
+    }
+    {
+        LayerParams lp;
+        lp.type = "ReLU";
+        lp.name = "testReLU";
+        net.addLayerToPrev(lp.name, lp.type, lp);
+    }
+    int sz[] = {1, 1, 2, 3};
+    Mat input(4, &sz[0], CV_32F);
+    randu(input, -1.0, -0.1);
+    net.setInput(input);
+    net.setPreferableBackend(DNN_BACKEND_OPENCV);
+    Mat output = net.forward("testConv");
+    normAssert(input, output);
+}
+
 }} // namespace
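The regression test builds a 1x1 convolution with a single unit weight and no bias term, follows it with a ReLU, and feeds strictly negative input (randu over [-1.0, -0.1]). Since that convolution is an identity mapping, normAssert(input, output) on net.forward("testConv") can only pass if the ReLU was not fused into the convolution; a fused ReLU would clamp the negative values to zero.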