fix bug in layer fusion: only fuse conv + naryEltwise when the bias layer has a single consumer

commit 386be97ce2
parent de2f7666fb
Author: zihaomu
Date:   2023-03-14 19:06:06 +08:00


@@ -247,7 +247,7 @@ void Net::Impl::fuseLayers(const std::vector<LayerPin>& blobsToKeep_)
         {
             // fuse naryEltwise layer
             // bias must already be computed to fuse => bias layer must appear before convolution
-            if (biasLayerData->id < ld.id)
+            if (biasLayerData->id < ld.id && biasLayerData->consumers.size() == 1)
             {
                 // conv + naryEltwise.
                 CV_Assert_N(biasLayerData->outputBlobs.size() == 1, ld.inputBlobs.size() == 1);