Merge pull request #7371 from mshabunin:aruco-java-wrap

Commit 991c41c849
@@ -795,7 +795,7 @@ class ClassInfo(GeneralInfo):
         self.base = re.sub(r"^.*:", "", decl[1].split(",")[0]).strip().replace(self.jname, "")

     def __repr__(self):
-        return Template("CLASS $namespace.$classpath.$name : $base").substitute(**self.__dict__)
+        return Template("CLASS $namespace::$classpath.$name : $base").substitute(**self.__dict__)

     def getAllImports(self, module):
         return ["import %s;" % c for c in sorted(self.imports) if not c.startswith('org.opencv.'+module)]
@@ -1347,7 +1347,7 @@ class JavaWrapperGenerator(object):
                     ret = "return (jlong) new %s(_retval_);" % self.fullTypeName(fi.ctype)
                 elif fi.ctype.startswith('Ptr_'):
                     c_prologue.append("typedef Ptr<%s> %s;" % (self.fullTypeName(fi.ctype[4:]), fi.ctype))
-                    ret = "%(ctype)s* curval = new %(ctype)s(_retval_);return (jlong)curval->get();" % { 'ctype':fi.ctype }
+                    ret = "return (jlong)(new %(ctype)s(_retval_));" % { 'ctype':fi.ctype }
                 elif self.isWrapped(ret_type): # pointer to wrapped class:
                     ret = "return (jlong) _retval_;"
                 elif type_dict[fi.ctype]["jni_type"] == "jdoubleArray":
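The changed line is the return statement the generator emits for any method whose C++ result is a cv::Ptr<T> (a "Ptr_" ctype). Below is a minimal, self-contained C++ sketch of the ownership difference between the two emitted patterns; the class Foo and the handle-returning helpers are hypothetical stand-ins for generated JNI code, not actual generator output.

    // Sketch only: Foo stands for any wrapped class whose factory returns cv::Ptr<Foo>.
    #include <opencv2/core.hpp>
    #include <cstdint>

    struct Foo { int value = 42; };

    static cv::Ptr<Foo> create() { return cv::makePtr<Foo>(); }

    // Old emitted pattern: unwrap the smart pointer and expose the raw Foo*.
    // The helper Ptr is leaked and the handle carries no ownership, so it
    // cannot later be released as a Ptr<Foo>*.
    static int64_t handle_old()
    {
        cv::Ptr<Foo>* curval = new cv::Ptr<Foo>(create());
        return (int64_t)curval->get();
    }

    // New emitted pattern: expose the address of a heap-allocated Ptr<Foo>.
    // The handle is always a Ptr<T>*, which a matching delete stub can free.
    static int64_t handle_new()
    {
        return (int64_t)(new cv::Ptr<Foo>(create()));
    }

    int main()
    {
        return (handle_old() != 0 && handle_new() != 0) ? 0 : 1;
    }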
@@ -1406,6 +1406,8 @@ class JavaWrapperGenerator(object):
         clazz = ci.jname
         cpp_code.write ( Template( \
 """
+${namespace}
+
 JNIEXPORT $rtype JNICALL Java_org_opencv_${module}_${clazz}_$fname ($argst);

 JNIEXPORT $rtype JNICALL Java_org_opencv_${module}_${clazz}_$fname
@@ -1440,6 +1442,7 @@ JNIEXPORT $rtype JNICALL Java_org_opencv_${module}_${clazz}_$fname
             cvargs = ", ".join(cvargs), \
             default = default, \
             retval = retval, \
+            namespace = ('using namespace ' + ci.namespace.replace('.', '::') + ';') if ci.namespace else ''
         ) )

         # processing args with default values
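The two hunks above work together: the ${namespace} placeholder added to the template is filled from ci.namespace, so a matching using-directive is placed in front of each emitted JNI stub for a class living in a nested namespace. Below is a rough, hand-written sketch of what the emitted code for cv::ml::SVM::create() could look like; the mangled function name (the "_10" suffix) and the surrounding boilerplate are assumptions, not copied generator output.

    // Hand-written sketch of a generated stub; names are illustrative.
    #include <jni.h>
    #include <opencv2/ml.hpp>

    using namespace cv::ml;   // substituted for ${namespace}: "cv.ml" -> "cv::ml"

    extern "C" {

    // The template emits a declaration followed by the definition.
    JNIEXPORT jlong JNICALL Java_org_opencv_ml_SVM_create_10(JNIEnv*, jclass);

    JNIEXPORT jlong JNICALL Java_org_opencv_ml_SVM_create_10(JNIEnv*, jclass)
    {
        // SVM resolves through the using-directive above; the result is
        // returned as a heap-allocated Ptr<SVM>* (see the Ptr_ change earlier).
        cv::Ptr<SVM> _retval_ = SVM::create();
        return (jlong)(new cv::Ptr<SVM>(_retval_));
    }

    } // extern "C"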
@@ -1535,7 +1538,7 @@ JNIEXPORT void JNICALL Java_org_opencv_%(module)s_%(j_cls)s_delete
         '''
         Check if class stores Ptr<T>* instead of T* in nativeObj field
         '''
-        return self.isWrapped(classname) and self.classes[classname].base
+        return self.isWrapped(classname)

     def smartWrap(self, name, fullname):
         '''
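After this change every wrapped class is treated as "smart": its Java nativeObj field is assumed to hold a Ptr<T>* rather than a raw T*, which matches the new return pattern above. A minimal sketch of the corresponding delete stub named in the hunk header, for a hypothetical class Foo in a hypothetical module "sample" (the signature details are assumptions, not generator output):

    #include <jni.h>
    #include <opencv2/core.hpp>

    struct Foo { };

    extern "C"
    JNIEXPORT void JNICALL Java_org_opencv_sample_Foo_delete(JNIEnv*, jclass, jlong self)
    {
        // The handle is always a Ptr<Foo>*; deleting it drops one shared
        // reference instead of destroying an object that may still be shared.
        delete reinterpret_cast<cv::Ptr<Foo>*>(self);
    }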
@@ -289,7 +289,7 @@ public:
     <number_of_variables_in_responses>`, containing types of each input and output variable. See
     ml::VariableTypes.
     */
-    CV_WRAP static Ptr<cv::ml::TrainData> create(InputArray samples, int layout, InputArray responses,
+    CV_WRAP static Ptr<TrainData> create(InputArray samples, int layout, InputArray responses,
                                 InputArray varIdx=noArray(), InputArray sampleIdx=noArray(),
                                 InputArray sampleWeights=noArray(), InputArray varType=noArray());
 };
@@ -324,7 +324,7 @@ public:
     @param flags optional flags, depending on the model. Some of the models can be updated with the
     new training samples, not completely overwritten (such as NormalBayesClassifier or ANN_MLP).
     */
-    CV_WRAP virtual bool train( const Ptr<cv::ml::TrainData>& trainData, int flags=0 );
+    CV_WRAP virtual bool train( const Ptr<TrainData>& trainData, int flags=0 );

     /** @brief Trains the statistical model

@@ -347,7 +347,7 @@ public:
     The method uses StatModel::predict to compute the error. For regression models the error is
     computed as RMS, for classifiers - as a percent of missclassified samples (0%-100%).
     */
-    CV_WRAP virtual float calcError( const Ptr<cv::ml::TrainData>& data, bool test, OutputArray resp ) const;
+    CV_WRAP virtual float calcError( const Ptr<TrainData>& data, bool test, OutputArray resp ) const;

     /** @brief Predicts response(s) for the provided sample(s)

@@ -361,7 +361,7 @@ public:

     The class must implement static `create()` method with no parameters or with all default parameter values
     */
-    template<typename _Tp> static Ptr<_Tp> train(const Ptr<cv::ml::TrainData>& data, int flags=0)
+    template<typename _Tp> static Ptr<_Tp> train(const Ptr<TrainData>& data, int flags=0)
     {
         Ptr<_Tp> model = _Tp::create();
         return !model.empty() && model->train(data, flags) ? model : Ptr<_Tp>();
@@ -671,7 +671,7 @@ public:
     regression (SVM::EPS_SVR or SVM::NU_SVR). If it is SVM::ONE_CLASS, no optimization is made and
     the usual %SVM with parameters specified in params is executed.
     */
-    virtual bool trainAuto( const Ptr<cv::ml::TrainData>& data, int kFold = 10,
+    virtual bool trainAuto( const Ptr<TrainData>& data, int kFold = 10,
                     ParamGrid Cgrid = SVM::getDefaultGrid(SVM::C),
                     ParamGrid gammaGrid = SVM::getDefaultGrid(SVM::GAMMA),
                     ParamGrid pGrid = SVM::getDefaultGrid(SVM::P),
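The five hunks above are in the ML module header (the TrainData/StatModel/SVM declarations from opencv2/ml.hpp). Inside namespace cv::ml the explicit cv::ml:: qualification inside Ptr<> is redundant, and dropping it presumably lets the header parser behind the wrapper generator see the plain TrainData name it already knows how to wrap. For C++ callers nothing changes, as the small sketch below is meant to show (the placeholder data is arbitrary):

    #include <opencv2/core.hpp>
    #include <opencv2/ml.hpp>

    int main()
    {
        // Tiny placeholder dataset: 10 samples with 2 features, all labelled 0.
        cv::Mat samples(10, 2, CV_32F);
        cv::randu(samples, cv::Scalar(0), cv::Scalar(1));
        cv::Mat responses = cv::Mat::zeros(10, 1, CV_32S);

        // The declaration now reads Ptr<TrainData>, but the returned type is
        // still cv::ml::TrainData; code outside the namespace is unaffected.
        cv::Ptr<cv::ml::TrainData> data =
            cv::ml::TrainData::create(samples, cv::ml::ROW_SAMPLE, responses);

        return data.empty() ? 1 : 0;
    }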