diff --git a/modules/dnn/test/test_layers_1d.cpp b/modules/dnn/test/test_layers_1d.cpp
index 3f734a53a6..097bce1ecd 100644
--- a/modules/dnn/test/test_layers_1d.cpp
+++ b/modules/dnn/test/test_layers_1d.cpp
@@ -307,4 +307,46 @@ INSTANTIATE_TEST_CASE_P(/*nothting*/, Layer_Split_Test,
             std::vector<int>({4, 5})
 ));
 
+typedef testing::TestWithParam<tuple<std::vector<int>, std::vector<int>>> Layer_Expand_Test;
+TEST_P(Layer_Expand_Test, Accuracy_ND) {
+
+    std::vector<int> input_shape = get<0>(GetParam());
+    std::vector<int> target_shape = get<1>(GetParam());
+    if (input_shape.size() >= target_shape.size()) // Skip if input rank is not lower than target rank
+        return;
+
+    LayerParams lp;
+    lp.type = "Expand";
+    lp.name = "ExpandLayer";
+    lp.set("shape", DictValue::arrayInt(&target_shape[0], target_shape.size()));
+
+    Ptr<ExpandLayer> layer = ExpandLayer::create(lp);
+    Mat input(input_shape.size(), input_shape.data(), CV_32F);
+    cv::randn(input, 0.0, 1.0);
+
+    cv::Mat output_ref(target_shape, CV_32F, input.data);
+
+    std::vector<Mat> inputs{input};
+    std::vector<Mat> outputs;
+
+    runLayer(layer, inputs, outputs);
+    ASSERT_EQ(outputs.size(), 1);
+    ASSERT_EQ(shape(output_ref), shape(outputs[0]));
+    normAssert(output_ref, outputs[0]);
+}
+
+INSTANTIATE_TEST_CASE_P(/*nothing*/, Layer_Expand_Test, Combine(
+/*input blob shape*/ testing::Values(
+        std::vector<int>({}),
+        std::vector<int>({1}),
+        std::vector<int>({1, 1}),
+        std::vector<int>({1, 1, 1})
+    ),
+/*output blob shape*/ testing::Values(
+        std::vector<int>({1}),
+        std::vector<int>({1, 1}),
+        std::vector<int>({1, 1, 1}),
+        std::vector<int>({1, 1, 1, 1})
+    )
+));
 }}
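
Note on the reference construction above: `output_ref` is built as a Mat view over `input.data`, which is only a valid reference here because every parameterized shape consists of ones, so Expand never has to replicate elements. For readers unfamiliar with the broadcasting rule the layer implements, the sketch below computes an ONNX-style Expand output shape by aligning dimensions from the right and stretching size-1 dimensions. `broadcastShape` is a hypothetical helper written for illustration, not part of OpenCV's API.

#include <algorithm>
#include <stdexcept>
#include <vector>

// Illustrative helper (assumed name, not an OpenCV API): computes the output
// shape of an ONNX-style Expand. Dimensions are aligned from the right; a
// missing or size-1 dimension stretches to match the other operand.
static std::vector<int> broadcastShape(const std::vector<int>& in,
                                       const std::vector<int>& target)
{
    std::vector<int> out(std::max(in.size(), target.size()));
    for (size_t i = 0; i < out.size(); ++i)
    {
        const int a = i < in.size()     ? in[in.size() - 1 - i]         : 1;
        const int b = i < target.size() ? target[target.size() - 1 - i] : 1;
        if (a != b && a != 1 && b != 1)
            throw std::runtime_error("shapes are not broadcastable");
        out[out.size() - 1 - i] = std::max(a, b);
    }
    return out;
}

Under this rule, broadcastShape({1, 1}, {1, 1, 1}) yields {1, 1, 1} and the single stored element is shared, which is why the test can compare the layer output against a view over the input's buffer. It also motivates the early-return guard: the test only exercises cases where the input rank is strictly lower than the target rank.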