diff --git a/modules/dnn/src/layers/reduce_layer.cpp b/modules/dnn/src/layers/reduce_layer.cpp
index 30f8139c25..d9d8b111fd 100644
--- a/modules/dnn/src/layers/reduce_layer.cpp
+++ b/modules/dnn/src/layers/reduce_layer.cpp
@@ -380,9 +380,10 @@ public:
                     if (unprojected_indices[j] < shape_src[unreduced_axes[j]]) {
                         break;
                     }
-                    unprojected_indices[j] = 0;
+                    unprojected_indices[j] -= shape_src[unreduced_axes[j]];
+                    current_step -= shape_src[unreduced_axes[j]] * steps_src[unreduced_axes[j]];
                     ++unprojected_indices[j - 1];
-                    current_step = steps_src[unreduced_axes[j - 1]];
+                    current_step += steps_src[unreduced_axes[j - 1]];
                 }
             }
         }
diff --git a/modules/dnn/test/test_layers.cpp b/modules/dnn/test/test_layers.cpp
index 81c66b970f..a0171c9d91 100644
--- a/modules/dnn/test/test_layers.cpp
+++ b/modules/dnn/test/test_layers.cpp
@@ -1795,6 +1795,50 @@ INSTANTIATE_TEST_CASE_P(/**/, Layer_Test_ShuffleChannel, Combine(
 /*group*/ Values(1, 2, 3, 6),
     dnnBackendsAndTargets(/*with IE*/ false)
 ));
+TEST(Layer_Test_ReduceMean, accuracy_input_0)
+{
+    vector<int> szData = { 2, 1, 2, 1, 2 };
+    std::vector<float> initData = { 0, 1, 2, 3, 4, 5, 6, 7 };
+    Mat inpInitA(szData, CV_32FC1, Mat(initData).data);
+    std::vector<float> resAxes0 = { 2, 3, 4, 5 };
+    std::vector<float> resAxes1 = { 0, 1, 2, 3, 4, 5, 6, 7 };
+    std::vector<float> resAxes2 = { 1, 2, 5, 6 };
+    std::vector<float> resAxes3 = { 0, 1, 2, 3, 4, 5, 6, 7 };
+    std::vector<float> resAxes4 = { 0.5, 2.5, 4.5, 6.5 };
+    std::vector<std::vector<float> > resReduceMean = { resAxes0, resAxes1, resAxes2, resAxes3, resAxes4 };
+
+
+    for (int i = 0; i < resReduceMean.size(); i++)
+    {
+        Net net;
+        LayerParams lp;
+        lp.set("keepdims", 0);
+        lp.type = "Reduce";
+        lp.set("reduce", "MEAN");
+        lp.name = "testReduceMean";
+        lp.set("axes", i);
+        lp.blobs.push_back(inpInitA);
+
+        net.addLayerToPrev(lp.name, lp.type, lp);
+        net.setInput(inpInitA);
+        net.setPreferableBackend(DNN_BACKEND_OPENCV);
+
+        Mat output = net.forward();
+        MatShape gt_shape;
+        for (int j = 0; j < szData.size(); j++)
+        {
+            if (i == j) continue;
+            gt_shape.push_back(szData[j]);
+        }
+
+        EXPECT_EQ(gt_shape, shape(output));
+
+        Mat a = output.reshape(1, output.total());
+        normAssert(a, Mat(resReduceMean[i]));
+    }
+}
+
+
 // Check if relu is not fused to convolution if we requested it's output
 TEST(Layer_Test_Convolution, relu_fusion)
 {
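
For reviewers, the program below is a minimal, self-contained sketch of the odometer-style index walk the patch corrects. It is illustrative only: the 3-D shape and row-major strides are made up, and the names (shape_src, steps_src, unprojected_indices, current_step) merely mirror the diff rather than reproducing the actual reduce_layer.cpp code path. The point it demonstrates is that the running linear offset must be updated incrementally (+=/-=) on every carry; the old code overwrote it with a plain assignment, discarding the offsets contributed by the other axes.

// Sketch of the incremental multi-index -> linear-offset traversal.
// Hypothetical shape {2, 3, 4} with row-major strides {12, 4, 1};
// prints each multi-index together with its flattened offset.
#include <cstdio>
#include <vector>

int main()
{
    std::vector<int> shape_src = { 2, 3, 4 };
    std::vector<int> steps_src = { 12, 4, 1 };
    std::vector<int> unprojected_indices(shape_src.size(), 0);

    int current_step = 0;  // running linear offset into the flattened source
    const int total = 2 * 3 * 4;

    for (int n = 0; n < total; ++n)
    {
        std::printf("indices (%d,%d,%d) -> offset %d\n",
                    unprojected_indices[0], unprojected_indices[1],
                    unprojected_indices[2], current_step);

        // Advance the innermost index; current_step is only ever adjusted
        // incrementally, never overwritten, which is what the fix restores.
        int j = (int)unprojected_indices.size() - 1;
        ++unprojected_indices[j];
        current_step += steps_src[j];
        while (j > 0 && unprojected_indices[j] >= shape_src[j])
        {
            // Wrap this axis: subtract its full contribution to the offset ...
            unprojected_indices[j] -= shape_src[j];
            current_step -= shape_src[j] * steps_src[j];
            // ... and carry one step into the next-outer axis.
            --j;
            ++unprojected_indices[j];
            current_step += steps_src[j];
        }
    }
    return 0;
}

Run as written, this visits all 24 elements in order and ends with "indices (1,2,3) -> offset 23"; with the pre-patch assignment in place of the "+=" on a carry, the offset would reset whenever an inner axis wrapped, which is the bug the new Layer_Test_ReduceMean test exercises across each axis.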