diff --git a/modules/dnn/include/opencv2/dnn/dnn.hpp b/modules/dnn/include/opencv2/dnn/dnn.hpp
index 01e0021ccb..da0a407262 100644
--- a/modules/dnn/include/opencv2/dnn/dnn.hpp
+++ b/modules/dnn/include/opencv2/dnn/dnn.hpp
@@ -345,7 +345,7 @@ CV__DNN_EXPERIMENTAL_NS_BEGIN
         CV_WRAP Ptr<Layer> getLayer(LayerId layerId);
 
         /** @brief Returns pointers to input layers of specific layer. */
-        CV_WRAP std::vector<Ptr<Layer> > getLayerInputs(LayerId layerId);
+        std::vector<Ptr<Layer> > getLayerInputs(LayerId layerId); // FIXIT: CV_WRAP
 
         /** @brief Delete layer for the network (not implemented yet) */
         CV_WRAP void deleteLayer(LayerId layer);
@@ -502,16 +502,16 @@ CV__DNN_EXPERIMENTAL_NS_BEGIN
          * @param outLayerShapes output parameter for output layers shapes;
          * order is the same as in layersIds
          */
-        CV_WRAP void getLayerShapes(const MatShape& netInputShape,
+        void getLayerShapes(const MatShape& netInputShape,
                                     const int layerId,
                                     CV_OUT std::vector<MatShape>& inLayerShapes,
-                                    CV_OUT std::vector<MatShape>& outLayerShapes) const;
+                                    CV_OUT std::vector<MatShape>& outLayerShapes) const; // FIXIT: CV_WRAP
 
         /** @overload */
-        CV_WRAP void getLayerShapes(const std::vector<MatShape>& netInputShapes,
+        void getLayerShapes(const std::vector<MatShape>& netInputShapes,
                                     const int layerId,
                                     CV_OUT std::vector<MatShape>& inLayerShapes,
-                                    CV_OUT std::vector<MatShape>& outLayerShapes) const;
+                                    CV_OUT std::vector<MatShape>& outLayerShapes) const; // FIXIT: CV_WRAP
 
         /** @brief Computes FLOP for whole loaded model with specified input shapes.
          * @param netInputShapes vector of shapes for all net inputs.
@@ -544,8 +544,8 @@ CV__DNN_EXPERIMENTAL_NS_BEGIN
          * @param weights output parameter to store resulting bytes for weights.
          * @param blobs output parameter to store resulting bytes for intermediate blobs.
         */
-        CV_WRAP void getMemoryConsumption(const std::vector<MatShape>& netInputShapes,
-                                          CV_OUT size_t& weights, CV_OUT size_t& blobs) const;
+        void getMemoryConsumption(const std::vector<MatShape>& netInputShapes,
+                                          CV_OUT size_t& weights, CV_OUT size_t& blobs) const; // FIXIT: CV_WRAP
         /** @overload */
         CV_WRAP void getMemoryConsumption(const MatShape& netInputShape,
                                           CV_OUT size_t& weights, CV_OUT size_t& blobs) const;
@@ -565,15 +565,15 @@ CV__DNN_EXPERIMENTAL_NS_BEGIN
          * @param weights output parameter to store resulting bytes for weights.
          * @param blobs output parameter to store resulting bytes for intermediate blobs.
         */
-        CV_WRAP void getMemoryConsumption(const std::vector<MatShape>& netInputShapes,
+        void getMemoryConsumption(const std::vector<MatShape>& netInputShapes,
                                           CV_OUT std::vector<int>& layerIds,
                                           CV_OUT std::vector<size_t>& weights,
-                                          CV_OUT std::vector<size_t>& blobs) const;
+                                          CV_OUT std::vector<size_t>& blobs) const; // FIXIT: CV_WRAP
         /** @overload */
-        CV_WRAP void getMemoryConsumption(const MatShape& netInputShape,
+        void getMemoryConsumption(const MatShape& netInputShape,
                                           CV_OUT std::vector<int>& layerIds,
                                           CV_OUT std::vector<size_t>& weights,
-                                          CV_OUT std::vector<size_t>& blobs) const;
+                                          CV_OUT std::vector<size_t>& blobs) const; // FIXIT: CV_WRAP
 
         /** @brief Enables or disables layer fusion in the network.
          * @param fusion true to enable the fusion, false to disable. The fusion is enabled by default.
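
The hunks above only drop `CV_WRAP` (so the Java generator stops trying to bind overloads whose `std::vector<MatShape>` / `std::vector<size_t>` outputs have no converters); the methods themselves stay in the C++ API. The sketch below is not part of the patch: it is a minimal illustration of calling the now C++-only overloads next to a still-wrapped one. The model/config file names and the 1x3x224x224 input shape are placeholders, not anything mandated by the patch.

```cpp
#include <opencv2/dnn.hpp>
#include <iostream>

int main()
{
    using namespace cv;
    using namespace cv::dnn;

    // Placeholder model files -- substitute any model readNet() accepts.
    Net net = readNet("model.caffemodel", "deploy.prototxt");

    // MatShape is a std::vector<int>; NCHW input shape as a placeholder.
    MatShape inputShape = {1, 3, 224, 224};

    // Per-layer shape query: this overload lost CV_WRAP, so it is C++-only.
    std::vector<MatShape> inShapes, outShapes;
    int layerId = net.getLayerId(net.getLayerNames()[0]);
    net.getLayerShapes(inputShape, layerId, inShapes, outShapes);

    // Per-layer memory query (vector<size_t> outputs): also C++-only now.
    std::vector<int> layerIds;
    std::vector<size_t> weights, blobs;
    net.getMemoryConsumption(inputShape, layerIds, weights, blobs);
    std::cout << "layers reported: " << layerIds.size() << std::endl;

    // The aggregate overload keeps CV_WRAP and remains visible to bindings.
    size_t w = 0, b = 0;
    net.getMemoryConsumption(inputShape, w, b);
    std::cout << "weights: " << w << " bytes, blobs: " << b << " bytes" << std::endl;
    return 0;
}
```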
diff --git a/modules/dnn/misc/java/src/cpp/dnn_converters.cpp b/modules/dnn/misc/java/src/cpp/dnn_converters.cpp
index 584bd5762b..97ff01cc15 100644
--- a/modules/dnn/misc/java/src/cpp/dnn_converters.cpp
+++ b/modules/dnn/misc/java/src/cpp/dnn_converters.cpp
@@ -19,18 +19,6 @@ void MatShape_to_Mat(MatShape& matshape, cv::Mat& mat)
     mat = cv::Mat(matshape, true);
 }
 
-void Mat_to_vector_size_t(cv::Mat& mat, std::vector<size_t>& v_size_t)
-{
-    v_size_t.clear();
-    CHECK_MAT(mat.type()==CV_32SC1 && mat.cols==1);
-    v_size_t = (std::vector<size_t>) mat;
-}
-
-void vector_size_t_to_Mat(std::vector<size_t>& v_size_t, cv::Mat& mat)
-{
-    mat = cv::Mat(v_size_t, true);
-}
-
 std::vector<MatShape> List_to_vector_MatShape(JNIEnv* env, jobject list)
 {
     static jclass juArrayList = ARRAYLIST(env);
diff --git a/modules/dnn/misc/java/src/cpp/dnn_converters.hpp b/modules/dnn/misc/java/src/cpp/dnn_converters.hpp
index f590161958..1e152780fb 100644
--- a/modules/dnn/misc/java/src/cpp/dnn_converters.hpp
+++ b/modules/dnn/misc/java/src/cpp/dnn_converters.hpp
@@ -22,10 +22,6 @@ void Mat_to_MatShape(cv::Mat& mat, MatShape& matshape);
 
 void MatShape_to_Mat(MatShape& matshape, cv::Mat& mat);
 
-void Mat_to_vector_size_t(cv::Mat& mat, std::vector<size_t>& v_size_t);
-
-void vector_size_t_to_Mat(std::vector<size_t>& v_size_t, cv::Mat& mat);
-
 std::vector<MatShape> List_to_vector_MatShape(JNIEnv* env, jobject list);
 
 jobject vector_Ptr_Layer_to_List(JNIEnv* env, std::vector<cv::Ptr<cv::dnn::Layer> >& vs);
diff --git a/modules/java/generator/src/cpp/listconverters.cpp b/modules/java/generator/src/cpp/listconverters.cpp
index 150bdae091..d1f45c65e6 100644
--- a/modules/java/generator/src/cpp/listconverters.cpp
+++ b/modules/java/generator/src/cpp/listconverters.cpp
@@ -57,20 +57,3 @@ void Copy_vector_String_to_List(JNIEnv* env, std::vector<cv::String>& vs, jobjec
         env->DeleteLocalRef(element);
     }
 }
-
-#if defined(HAVE_OPENCV_DNN)
-void Copy_vector_MatShape_to_List(JNIEnv* env, std::vector<MatShape>& vs, jobject list)
-{
-    static jclass juArrayList = ARRAYLIST(env);
-    jmethodID m_clear = LIST_CLEAR(env, juArrayList);
-    jmethodID m_add = LIST_ADD(env, juArrayList);
-
-    env->CallVoidMethod(list, m_clear);
-    for (std::vector<MatShape>::iterator it = vs.begin(); it != vs.end(); ++it)
-    {
-        jstring element = env->NewStringUTF("");
-        env->CallBooleanMethod(list, m_add, element);
-        env->DeleteLocalRef(element);
-    }
-}
-#endif
diff --git a/modules/java/generator/src/cpp/listconverters.hpp b/modules/java/generator/src/cpp/listconverters.hpp
index 5de2c3a12e..4d63d9d86f 100644
--- a/modules/java/generator/src/cpp/listconverters.hpp
+++ b/modules/java/generator/src/cpp/listconverters.hpp
@@ -16,9 +16,4 @@ std::vector<cv::String> List_to_vector_String(JNIEnv* env, jobject list);
 
 void Copy_vector_String_to_List(JNIEnv* env, std::vector<cv::String>& vs, jobject list);
 
-#if defined(HAVE_OPENCV_DNN)
-#include "opencv2/dnn.hpp"
-void Copy_vector_MatShape_to_List(JNIEnv* env, std::vector<MatShape>& vs, jobject list);
-#endif
-
 #endif /* LISTCONVERTERS_HPP */
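
The deleted converters (`Mat_to_vector_size_t`, `vector_size_t_to_Mat`, and the stub `Copy_vector_MatShape_to_List`, which only appended empty strings) had no remaining callers once the overloads above lost `CV_WRAP`. The converters that stay, `Mat_to_MatShape` / `MatShape_to_Mat`, rely on the `cv::Mat` <-> `std::vector<int>` conversions built into the core API. The snippet below is not part of the patch; it is a small, self-contained sketch of that round trip, using only documented `cv::Mat` constructors and cast operators.

```cpp
#include <opencv2/core.hpp>
#include <opencv2/dnn.hpp>
#include <cassert>

int main()
{
    // MatShape is a typedef for std::vector<int>.
    cv::dnn::MatShape shape = {1, 3, 224, 224};

    // cv::Mat(vec, true) deep-copies the vector into a 4x1 CV_32SC1 column
    // matrix -- the same call MatShape_to_Mat uses.
    cv::Mat m(shape, true);
    assert(m.type() == CV_32SC1 && m.cols == 1 && m.rows == 4);

    // The cast operator recovers the vector -- the pattern Mat_to_MatShape
    // (and the removed Mat_to_vector_size_t) relied on.
    cv::dnn::MatShape roundTrip = (cv::dnn::MatShape)m;
    assert(roundTrip == shape);
    return 0;
}
```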