diff --git a/modules/dnn/src/layers/convolution_layer.cpp b/modules/dnn/src/layers/convolution_layer.cpp
index 0c719a21fa..6b4bdb4c08 100644
--- a/modules/dnn/src/layers/convolution_layer.cpp
+++ b/modules/dnn/src/layers/convolution_layer.cpp
@@ -273,6 +273,9 @@ public:
             for(int i = 0; i < outCn; i++ )
                 biasvec[i] = biasMat.at<float>(i);
         }
+#ifdef HAVE_OPENCL
+        convolutionOp.release();
+#endif
     }
 
     bool setActivation(const Ptr<ActivationLayer>& layer)
diff --git a/modules/dnn/src/layers/fully_connected_layer.cpp b/modules/dnn/src/layers/fully_connected_layer.cpp
index ccc8dec96a..48fdb1b3f1 100644
--- a/modules/dnn/src/layers/fully_connected_layer.cpp
+++ b/modules/dnn/src/layers/fully_connected_layer.cpp
@@ -267,6 +267,11 @@ public:
     };
 
 #ifdef HAVE_OPENCL
+    void finalize(const std::vector<Mat*> &inputs, std::vector<Mat> &outputs)
+    {
+        innerProductOp.release();
+    }
+
     bool forward_ocl(InputArrayOfArrays inps, OutputArrayOfArrays outs, InputArrayOfArrays internals)
     {
         std::vector<UMat> inputs;
diff --git a/modules/dnn/src/layers/lrn_layer.cpp b/modules/dnn/src/layers/lrn_layer.cpp
index a01d5f0a8d..44687abdf3 100644
--- a/modules/dnn/src/layers/lrn_layer.cpp
+++ b/modules/dnn/src/layers/lrn_layer.cpp
@@ -96,6 +96,11 @@ public:
     }
 
 #ifdef HAVE_OPENCL
+    void finalize(const std::vector<Mat*> &inputs, std::vector<Mat> &outputs)
+    {
+        lrnOp.release();
+    }
+
     bool forward_ocl(InputArrayOfArrays inps, OutputArrayOfArrays outs, OutputArrayOfArrays internals)
     {
         std::vector<UMat> inputs;
diff --git a/modules/dnn/src/layers/pooling_layer.cpp b/modules/dnn/src/layers/pooling_layer.cpp
index 1aeba3a7be..cefe61c8ad 100644
--- a/modules/dnn/src/layers/pooling_layer.cpp
+++ b/modules/dnn/src/layers/pooling_layer.cpp
@@ -126,6 +126,10 @@ public:
         }
 
         getConvPoolPaddings(inp, out, kernel, stride, padMode, Size(1, 1), pad);
+
+#ifdef HAVE_OPENCL
+        poolOp.release();
+#endif
     }
 
     virtual bool supportBackend(int backendId)
diff --git a/modules/dnn/src/layers/softmax_layer.cpp b/modules/dnn/src/layers/softmax_layer.cpp
index 30db02b94c..b9f200dcb1 100644
--- a/modules/dnn/src/layers/softmax_layer.cpp
+++ b/modules/dnn/src/layers/softmax_layer.cpp
@@ -95,14 +95,7 @@ public:
 #ifdef HAVE_OPENCL
     virtual void finalize(const std::vector<Mat*> &inputs, std::vector<Mat> &outputs)
     {
-        OCL4DNNSoftmaxConfig config;
-
-        config.in_shape = shape(*inputs[0]);
-        config.axis = axisRaw;
-        config.channels = inputs[0]->size[axisRaw];
-        config.logsoftmax = logSoftMax;
-
-        softmaxOp = Ptr<OCL4DNNSoftmax<float> >(new OCL4DNNSoftmax<float>(config));
+        softmaxOp.release();
     }
 
     bool forward_ocl(InputArrayOfArrays inps, OutputArrayOfArrays outs, OutputArrayOfArrays itns)
@@ -115,6 +108,18 @@ public:
         outs.getUMatVector(outputs);
         itns.getUMatVector(internals);
 
+        if (softmaxOp.empty())
+        {
+            OCL4DNNSoftmaxConfig config;
+
+            config.in_shape = shape(inputs[0]);
+            config.axis = axisRaw;
+            config.channels = inputs[0].size[axisRaw];
+            config.logsoftmax = logSoftMax;
+
+            softmaxOp = Ptr<OCL4DNNSoftmax<float> >(new OCL4DNNSoftmax<float>(config));
+        }
+
         UMat& src = inputs[0];
         UMat& dstMat = outputs[0];
diff --git a/modules/dnn/test/test_googlenet.cpp b/modules/dnn/test/test_googlenet.cpp
index 73460dedef..697f78025f 100644
--- a/modules/dnn/test/test_googlenet.cpp
+++ b/modules/dnn/test/test_googlenet.cpp
@@ -77,6 +77,10 @@ OCL_TEST(Reproducibility_GoogLeNet, Accuracy)
     net.setPreferableBackend(DNN_BACKEND_DEFAULT);
     net.setPreferableTarget(DNN_TARGET_OPENCL);
 
+    // Initialize network for a single image in the batch but test with batch size=2.
+    net.setInput(blobFromImage(Mat(224, 224, CV_8UC3)));
+    net.forward();
+
     std::vector<Mat> inpMats;
     inpMats.push_back( imread(_tf("googlenet_0.png")) );
     inpMats.push_back( imread(_tf("googlenet_1.png")) );