Merge remote-tracking branch 'upstream/3.4' into merge-3.4

commit 8fad85edda
Alexander Alekhin, 2021-07-01 10:52:31 +00:00
19 changed files with 107 additions and 53 deletions

View File

@@ -138,8 +138,8 @@ if(INF_ENGINE_TARGET)
     math(EXPR INF_ENGINE_RELEASE "${InferenceEngine_VERSION_MAJOR} * 1000000 + ${InferenceEngine_VERSION_MINOR} * 10000 + ${InferenceEngine_VERSION_PATCH} * 100")
   endif()
   if(NOT INF_ENGINE_RELEASE)
-    message(WARNING "InferenceEngine version has not been set, 2021.3 will be used by default. Set INF_ENGINE_RELEASE variable if you experience build errors.")
-    set(INF_ENGINE_RELEASE "2021030000")
+    message(WARNING "InferenceEngine version has not been set, 2021.4 will be used by default. Set INF_ENGINE_RELEASE variable if you experience build errors.")
+    set(INF_ENGINE_RELEASE "2021040000")
   endif()
   set(INF_ENGINE_RELEASE "${INF_ENGINE_RELEASE}" CACHE STRING "Force IE version, should be in form YYYYAABBCC (e.g. 2020.1.0.2 -> 2020010002)")
   set_target_properties(${INF_ENGINE_TARGET} PROPERTIES
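
A note on the version encoding used above (an aside, not part of the diff): the math(EXPR ...) line and the YYYYAABBCC hint in the cache description follow the same scheme, major * 1000000 + minor * 10000 + patch * 100. A minimal standalone C++ sketch of that arithmetic, assuming nothing beyond the formula shown in the hunk:

    // Standalone sketch of the INF_ENGINE_RELEASE encoding (illustrative only).
    #include <cstdio>

    constexpr long ieRelease(long major, long minor, long patch)
    {
        return major * 1000000 + minor * 10000 + patch * 100;  // yields YYYYAABB00
    }

    static_assert(ieRelease(2021, 4, 0) == 2021040000, "2021.4.0 -> 2021040000");
    static_assert(ieRelease(2021, 3, 0) == 2021030000, "2021.3.0 -> 2021030000");

    int main()
    {
        std::printf("%ld\n", ieRelease(2021, 4, 0));  // prints 2021040000
    }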

View File

@@ -16,8 +16,8 @@
 # define OPENCV_HAVE_FILESYSTEM_SUPPORT 1
 # elif defined(__APPLE__)
 # include <TargetConditionals.h>
-# if (defined(TARGET_OS_OSX) && TARGET_OS_OSX) || (!defined(TARGET_OS_OSX) && !TARGET_OS_IPHONE)
-# define OPENCV_HAVE_FILESYSTEM_SUPPORT 1 // OSX only
+# if (defined(TARGET_OS_OSX) && TARGET_OS_OSX) || (defined(TARGET_OS_IOS) && TARGET_OS_IOS)
+# define OPENCV_HAVE_FILESYSTEM_SUPPORT 1 // OSX, iOS only
 # endif
 # else
 /* unknown */
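
For context on the new condition (an aside, not part of the diff): Apple's TargetConditionals.h defines the TARGET_OS_* macros to 0 or 1, and older SDKs may not define TARGET_OS_OSX or TARGET_OS_IOS at all, which is why the check pairs defined(X) with X. A rough sketch of the idiom, only meaningful when building against an Apple SDK; the BUILD_TARGET name is illustrative:

    // Illustrative only; relies on Apple's TargetConditionals.h when present.
    #if defined(__APPLE__)
    #  include <TargetConditionals.h>
    #  if (defined(TARGET_OS_OSX) && TARGET_OS_OSX)
    #    define BUILD_TARGET "macOS"
    #  elif (defined(TARGET_OS_IOS) && TARGET_OS_IOS)
    #    define BUILD_TARGET "iOS"
    #  else
    #    define BUILD_TARGET "other Apple platform"
    #  endif
    #else
    #  define BUILD_TARGET "non-Apple platform"
    #endif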

View File

@@ -657,7 +657,11 @@ void InfEngineNgraphNet::initPlugin(InferenceEngine::CNNNetwork& net)
     try
     {
         InferenceEngine::IExtensionPtr extension =
+#if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2021_4)
+            std::make_shared<InferenceEngine::Extension>(libName);
+#else
             InferenceEngine::make_so_pointer<InferenceEngine::IExtension>(libName);
+#endif
         ie.AddExtension(extension, "CPU");
         CV_LOG_INFO(NULL, "DNN-IE: Loaded extension plugin: " << libName);
@@ -1005,35 +1009,54 @@ void InfEngineNgraphNet::forward(const std::vector<Ptr<BackendWrapper> >& outBlo
         reqWrapper->req.SetInput(inpBlobs);
         reqWrapper->req.SetOutput(outBlobs);
+#if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2021_4)
+        InferenceEngine::InferRequest infRequest = reqWrapper->req;
+        NgraphReqWrapper* wrapperPtr = reqWrapper.get();
+        CV_Assert(wrapperPtr && "Internal error");
+#else
         InferenceEngine::IInferRequest::Ptr infRequestPtr = reqWrapper->req;
-        infRequestPtr->SetUserData(reqWrapper.get(), 0);
+        CV_Assert(infRequestPtr);
+        InferenceEngine::IInferRequest& infRequest = *infRequestPtr.get();
+        infRequest.SetUserData(reqWrapper.get(), 0);
+#endif
-        infRequestPtr->SetCompletionCallback(
-            [](InferenceEngine::IInferRequest::Ptr request, InferenceEngine::StatusCode status)
+#if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2021_4)
+        // do NOT capture 'reqWrapper' (smart ptr) in the lambda callback
+        infRequest.SetCompletionCallback<std::function<void(InferenceEngine::InferRequest, InferenceEngine::StatusCode)>>(
+            [wrapperPtr](InferenceEngine::InferRequest /*request*/, InferenceEngine::StatusCode status)
+#else
+        infRequest.SetCompletionCallback(
+            [](InferenceEngine::IInferRequest::Ptr requestPtr, InferenceEngine::StatusCode status)
+#endif
             {
                 CV_LOG_DEBUG(NULL, "DNN(nGraph): completionCallback(" << (int)status << ")");
+#if !INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2021_4)
+                CV_Assert(requestPtr);
+                InferenceEngine::IInferRequest& request = *requestPtr.get();
-                NgraphReqWrapper* wrapper;
-                request->GetUserData((void**)&wrapper, 0);
-                CV_Assert(wrapper && "Internal error");
+                NgraphReqWrapper* wrapperPtr;
+                request.GetUserData((void**)&wrapperPtr, 0);
+                CV_Assert(wrapperPtr && "Internal error");
+#endif
+                NgraphReqWrapper& wrapper = *wrapperPtr;
                 size_t processedOutputs = 0;
                 try
                 {
-                    for (; processedOutputs < wrapper->outProms.size(); ++processedOutputs)
+                    for (; processedOutputs < wrapper.outProms.size(); ++processedOutputs)
                     {
-                        const std::string& name = wrapper->outsNames[processedOutputs];
-                        Mat m = ngraphBlobToMat(wrapper->req.GetBlob(name));
+                        const std::string& name = wrapper.outsNames[processedOutputs];
+                        Mat m = ngraphBlobToMat(wrapper.req.GetBlob(name));
                         try
                         {
                             CV_Assert(status == InferenceEngine::StatusCode::OK);
-                            wrapper->outProms[processedOutputs].setValue(m.clone());
+                            wrapper.outProms[processedOutputs].setValue(m.clone());
                         }
                         catch (...)
                         {
                             try {
-                                wrapper->outProms[processedOutputs].setException(std::current_exception());
+                                wrapper.outProms[processedOutputs].setException(std::current_exception());
                             } catch(...) {
                                 CV_LOG_ERROR(NULL, "DNN: Exception occurred during async inference exception propagation");
                             }
@@ -1043,16 +1066,16 @@ void InfEngineNgraphNet::forward(const std::vector<Ptr<BackendWrapper> >& outBlo
                 catch (...)
                 {
                     std::exception_ptr e = std::current_exception();
-                    for (; processedOutputs < wrapper->outProms.size(); ++processedOutputs)
+                    for (; processedOutputs < wrapper.outProms.size(); ++processedOutputs)
                     {
                         try {
-                            wrapper->outProms[processedOutputs].setException(e);
+                            wrapper.outProms[processedOutputs].setException(e);
                         } catch(...) {
                             CV_LOG_ERROR(NULL, "DNN: Exception occurred during async inference exception propagation");
                         }
                     }
                 }
-                wrapper->isReady = true;
+                wrapper.isReady = true;
            }
        );
    }
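
The "do NOT capture 'reqWrapper' (smart ptr)" comment in the 2021.4 branch warns against capturing the owning smart pointer inside a callback that the wrapped request itself stores: that would create a self-reference and the wrapper could never be released, whereas a raw pointer capture does not extend its lifetime. A minimal standalone sketch of that general pitfall (illustrative only, not OpenCV or Inference Engine code):

    // Illustrative only -- shows why a stored completion callback should
    // capture a raw pointer rather than the owning shared_ptr.
    #include <functional>
    #include <iostream>
    #include <memory>

    struct Request
    {
        std::function<void()> onDone;              // callback stored by the request itself
        ~Request() { std::cout << "Request destroyed\n"; }
    };

    int main()
    {
        {
            auto leaky = std::make_shared<Request>();
            leaky->onDone = [leaky] { (void)leaky; };   // self-reference: refcount never reaches 0
        }   // destructor does NOT run here
        {
            auto ok = std::make_shared<Request>();
            Request* raw = ok.get();                    // non-owning, like wrapperPtr above
            ok->onDone = [raw] { (void)raw; };          // no ownership cycle
        }   // "Request destroyed" is printed here
    }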

View File

@@ -35,6 +35,7 @@ namespace dnn
 class BatchNormLayerImpl CV_FINAL : public BatchNormLayer
 {
 public:
+    Mat origin_weights, origin_bias;
     Mat weights_, bias_;
     UMat umat_weight, umat_bias;
     mutable int dims;
@@ -88,11 +89,11 @@ public:
         const float* weightsData = hasWeights ? blobs[weightsBlobIndex].ptr<float>() : 0;
         const float* biasData = hasBias ? blobs[biasBlobIndex].ptr<float>() : 0;
-        weights_.create(1, (int)n, CV_32F);
-        bias_.create(1, (int)n, CV_32F);
-        float* dstWeightsData = weights_.ptr<float>();
-        float* dstBiasData = bias_.ptr<float>();
+        origin_weights.create(1, (int)n, CV_32F);
+        origin_bias.create(1, (int)n, CV_32F);
+        float* dstWeightsData = origin_weights.ptr<float>();
+        float* dstBiasData = origin_bias.ptr<float>();
         for (size_t i = 0; i < n; ++i)
         {
@@ -100,15 +101,12 @@ public:
             dstWeightsData[i] = w;
             dstBiasData[i] = (hasBias ? biasData[i] : 0.0f) - w * meanData[i] * varMeanScale;
         }
-        // We will use blobs to store origin weights and bias to restore them in case of reinitialization.
-        weights_.copyTo(blobs[0].reshape(1, 1));
-        bias_.copyTo(blobs[1].reshape(1, 1));
     }
     virtual void finalize(InputArrayOfArrays, OutputArrayOfArrays) CV_OVERRIDE
     {
-        blobs[0].reshape(1, 1).copyTo(weights_);
-        blobs[1].reshape(1, 1).copyTo(bias_);
+        origin_weights.reshape(1, 1).copyTo(weights_);
+        origin_bias.reshape(1, 1).copyTo(bias_);
     }
     void getScaleShift(Mat& scale, Mat& shift) const CV_OVERRIDE
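
The removed comment explains the old approach (writing the fused values back into blobs[0]/blobs[1] so they could be restored on re-initialization); the new members keep those originals directly, so finalize() can rebuild weights_/bias_ from an untouched copy every time it runs. A rough standalone sketch of that keep-the-originals pattern, with a hypothetical class and nothing OpenCV-specific beyond cv::Mat:

    // Illustrative pattern only (hypothetical class, not the OpenCV layer).
    #include <opencv2/core.hpp>

    struct FusedParams
    {
        cv::Mat origin_weights, origin_bias;   // computed once, never modified afterwards
        cv::Mat weights_, bias_;               // working copies, safe to regenerate

        // May be called repeatedly; always starts from the pristine originals,
        // so repeated calls cannot accumulate changes in the working copies.
        void finalize()
        {
            origin_weights.reshape(1, 1).copyTo(weights_);
            origin_bias.reshape(1, 1).copyTo(bias_);
        }
    };

    int main()
    {
        FusedParams p;
        p.origin_weights = cv::Mat::ones(1, 4, CV_32F);
        p.origin_bias = cv::Mat::zeros(1, 4, CV_32F);
        p.finalize();
        p.finalize();  // idempotent: weights_/bias_ still equal the originals
        return 0;
    }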

View File

@@ -1954,6 +1954,23 @@ void ONNXImporter::handleNode(const opencv_onnx::NodeProto& node_proto_)
             addConstant(layerParams.name, concatenated[0]);
             return;
         }
+        else
+        {
+            for (int i = 0; i < node_proto.input_size(); ++i)
+            {
+                if (constBlobs.find(node_proto.input(i)) != constBlobs.end())
+                {
+                    LayerParams constParams;
+                    constParams.name = node_proto.input(i);
+                    constParams.type = "Const";
+                    constParams.blobs.push_back(getBlob(node_proto, i));
+
+                    opencv_onnx::NodeProto proto;
+                    proto.add_output(constParams.name);
+                    addLayer(constParams, proto);
+                }
+            }
+        }
     }
     else if (layer_type == "Resize")
     {

View File

@@ -30,10 +30,11 @@
 #define INF_ENGINE_RELEASE_2021_1 2021010000
 #define INF_ENGINE_RELEASE_2021_2 2021020000
 #define INF_ENGINE_RELEASE_2021_3 2021030000
+#define INF_ENGINE_RELEASE_2021_4 2021040000
 #ifndef INF_ENGINE_RELEASE
-#warning("IE version have not been provided via command-line. Using 2021.3 by default")
-#define INF_ENGINE_RELEASE INF_ENGINE_RELEASE_2021_3
+#warning("IE version have not been provided via command-line. Using 2021.4 by default")
+#define INF_ENGINE_RELEASE INF_ENGINE_RELEASE_2021_4
 #endif
 #define INF_ENGINE_VER_MAJOR_GT(ver) (((INF_ENGINE_RELEASE) / 10000) > ((ver) / 10000))
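
As an aside on the comparison macro kept as context above: dividing the YYYYAABBCC value by 10000 drops the BBCC digits, so INF_ENGINE_VER_MAJOR_GT compares only the year/update part of the release. A small standalone check of that arithmetic:

    // Standalone illustration of the /10000 truncation used by the macro above.
    #include <cstdio>

    int main()
    {
        const long v2021_4 = 2021040000;  // INF_ENGINE_RELEASE_2021_4
        const long v2021_3 = 2021030000;  // INF_ENGINE_RELEASE_2021_3
        std::printf("%ld > %ld -> %d\n", v2021_4 / 10000, v2021_3 / 10000,
                    (v2021_4 / 10000) > (v2021_3 / 10000));
        // prints: 202104 > 202103 -> 1
    }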

View File

@@ -204,7 +204,7 @@ TEST_P(DNNTestNetwork, MobileNet_SSD_Caffe)
     Mat inp = blobFromImage(sample, 1.0f / 127.5, Size(300, 300), Scalar(127.5, 127.5, 127.5), false);
     float scoreDiff = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 1.5e-2 : 0.0;
     float iouDiff = (target == DNN_TARGET_MYRIAD) ? 0.063  : 0.0;
-    float detectionConfThresh = (target == DNN_TARGET_MYRIAD) ? 0.252  : FLT_MIN;
+    float detectionConfThresh = (target == DNN_TARGET_MYRIAD) ? 0.262  : FLT_MIN;
     processNet("dnn/MobileNetSSD_deploy.caffemodel", "dnn/MobileNetSSD_deploy.prototxt",
                inp, "detection_out", "", scoreDiff, iouDiff, detectionConfThresh);
     expectNoFallbacksFromIE(net);
@@ -359,8 +359,8 @@ TEST_P(DNNTestNetwork, OpenPose_pose_coco)
         applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
 #endif
-    const float l1 = (target == DNN_TARGET_MYRIAD) ? 0.0056 : 0.0;
-    const float lInf = (target == DNN_TARGET_MYRIAD) ? 0.072 : 0.0;
+    const float l1 = (target == DNN_TARGET_MYRIAD) ? 0.009 : 0.0;
+    const float lInf = (target == DNN_TARGET_MYRIAD) ? 0.09 : 0.0;
     processNet("dnn/openpose_pose_coco.caffemodel", "dnn/openpose_pose_coco.prototxt",
                Size(46, 46), "", "", l1, lInf);
     expectNoFallbacksFromIE(net);
@@ -380,8 +380,8 @@ TEST_P(DNNTestNetwork, OpenPose_pose_mpi)
 #endif
     // output range: [-0.001, 0.97]
-    const float l1 = (target == DNN_TARGET_MYRIAD) ? 0.012 : 0.0;
-    const float lInf = (target == DNN_TARGET_MYRIAD || target == DNN_TARGET_OPENCL_FP16) ? 0.16 : 0.0;
+    const float l1 = (target == DNN_TARGET_MYRIAD) ? 0.02 : 0.0;
+    const float lInf = (target == DNN_TARGET_MYRIAD || target == DNN_TARGET_OPENCL_FP16) ? 0.2 : 0.0;
     processNet("dnn/openpose_pose_mpi.caffemodel", "dnn/openpose_pose_mpi.prototxt",
                Size(46, 46), "", "", l1, lInf);
     expectNoFallbacksFromIE(net);

View File

@@ -307,6 +307,15 @@ TEST_P(DNNTestOpenVINO, models)
     ASSERT_FALSE(backendId != DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && backendId != DNN_BACKEND_INFERENCE_ENGINE_NGRAPH) <<
         "Inference Engine backend is required";
+#if INF_ENGINE_VER_MAJOR_EQ(2021040000)
+    if (targetId == DNN_TARGET_MYRIAD && (
+            modelName == "person-detection-retail-0013" ||  // ncDeviceOpen:1013 Failed to find booted device after boot
+            modelName == "age-gender-recognition-retail-0013"  // ncDeviceOpen:1013 Failed to find booted device after boot
+        )
+    )
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_DNN_BACKEND_INFERENCE_ENGINE_NGRAPH, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
+#endif
+
 #if INF_ENGINE_VER_MAJOR_GE(2020020000)
     if (targetId == DNN_TARGET_MYRIAD && backendId == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
     {

View File

@@ -349,6 +349,7 @@ TEST_P(Test_ONNX_layers, Concatenation)
         if (target == DNN_TARGET_MYRIAD) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
     }
     testONNXModels("concatenation");
+    testONNXModels("concat_const_blobs");
 }

 TEST_P(Test_ONNX_layers, Eltwise3D)

View File

@@ -290,9 +290,14 @@ TEST_P(Test_Torch_layers, net_padding)
 TEST_P(Test_Torch_layers, net_non_spatial)
 {
-#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2021030000)
+#if defined(INF_ENGINE_RELEASE) && ( \
+    INF_ENGINE_VER_MAJOR_EQ(2021030000) || \
+    INF_ENGINE_VER_MAJOR_EQ(2021040000) \
+)
     if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_MYRIAD)
-        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);  // crash
+        // 2021.3: crash
+        // 2021.4: [ GENERAL_ERROR ] AssertionFailed: !out.networkInputs.empty()
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
     if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_OPENCL)
         applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);  // exception
     if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_OPENCL_FP16)

View File

@@ -72,18 +72,18 @@ void Hist_and_Backproj(int, void* )
     //! [initialize]
     int histSize = MAX( bins, 2 );
     float hue_range[] = { 0, 180 };
-    const float* ranges = { hue_range };
+    const float* ranges[] = { hue_range };
     //! [initialize]

     //! [Get the Histogram and normalize it]
     Mat hist;
-    calcHist( &hue, 1, 0, Mat(), hist, 1, &histSize, &ranges, true, false );
+    calcHist( &hue, 1, 0, Mat(), hist, 1, &histSize, ranges, true, false );
     normalize( hist, hist, 0, 255, NORM_MINMAX, -1, Mat() );
     //! [Get the Histogram and normalize it]

     //! [Get Backprojection]
     Mat backproj;
-    calcBackProject( &hue, 1, 0, hist, backproj, &ranges, 1, true );
+    calcBackProject( &hue, 1, 0, hist, backproj, ranges, 1, true );
     //! [Get Backprojection]

     //! [Draw the backproj]
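
The fix above works because the C++ overloads of calcHist and calcBackProject used in this tutorial take the ranges argument as const float** (an array with one {low, high} pair per histogram dimension), so the variable has to be an array of pointers passed directly rather than through &. A minimal sketch of the corrected calling convention; the helper function and variable names are illustrative, not taken from the tutorial file:

    // Minimal sketch of the corrected calcHist calling convention.
    #include <algorithm>
    #include <opencv2/imgproc.hpp>

    static cv::Mat hueHistogram(const cv::Mat& hue, int bins)
    {
        int histSize = std::max(bins, 2);
        float hue_range[] = { 0, 180 };          // valid H range in OpenCV
        const float* ranges[] = { hue_range };   // one pointer per histogram dimension

        cv::Mat hist;
        cv::calcHist(&hue, 1, 0, cv::Mat(), hist, 1, &histSize, ranges, true, false);
        cv::normalize(hist, hist, 0, 255, cv::NORM_MINMAX, -1, cv::Mat());
        // Back-projection takes the same array-of-ranges argument:
        // cv::calcBackProject(&hue, 1, 0, hist, backproj, ranges, 1, true);
        return hist;
    }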

View File

@@ -37,7 +37,7 @@ int main(int argc, char** argv)
     //! [Set the ranges ( for B,G,R) )]
     float range[] = { 0, 256 }; //the upper boundary is exclusive
-    const float* histRange = { range };
+    const float* histRange[] = { range };
     //! [Set the ranges ( for B,G,R) )]

     //! [Set histogram param]
@@ -46,9 +46,9 @@ int main(int argc, char** argv)
     //! [Compute the histograms]
     Mat b_hist, g_hist, r_hist;
-    calcHist( &bgr_planes[0], 1, 0, Mat(), b_hist, 1, &histSize, &histRange, uniform, accumulate );
-    calcHist( &bgr_planes[1], 1, 0, Mat(), g_hist, 1, &histSize, &histRange, uniform, accumulate );
-    calcHist( &bgr_planes[2], 1, 0, Mat(), r_hist, 1, &histSize, &histRange, uniform, accumulate );
+    calcHist( &bgr_planes[0], 1, 0, Mat(), b_hist, 1, &histSize, histRange, uniform, accumulate );
+    calcHist( &bgr_planes[1], 1, 0, Mat(), g_hist, 1, &histSize, histRange, uniform, accumulate );
+    calcHist( &bgr_planes[2], 1, 0, Mat(), r_hist, 1, &histSize, histRange, uniform, accumulate );
     //! [Compute the histograms]

     //! [Draw the histograms for B, G and R]

View File

@@ -188,7 +188,7 @@ def main():
     fig = plt.figure()
     ax = fig.gca(projection='3d')
-    ax.set_aspect("equal")
+    ax.set_aspect("auto")

     cam_width = args.cam_width
     cam_height = args.cam_height

View File

@@ -32,7 +32,7 @@ def draw_gaussain(img, mean, cov, color):
     w, u, _vt = cv.SVDecomp(cov)
     ang = np.arctan2(u[1, 0], u[0, 0])*(180/np.pi)
     s1, s2 = np.sqrt(w)*3.0
-    cv.ellipse(img, (x, y), (s1, s2), ang, 0, 360, color, 1, cv.LINE_AA)
+    cv.ellipse(img, (int(x), int(y)), (int(s1), int(s2)), ang, 0, 360, color, 1, cv.LINE_AA)

 def main():

View File

@@ -48,7 +48,7 @@ def hist_lines(im):
     cv.normalize(hist_item,hist_item,0,255,cv.NORM_MINMAX)
     hist=np.int32(np.around(hist_item))
     for x,y in enumerate(hist):
-        cv.line(h,(x,0),(x,y),(255,255,255))
+        cv.line(h,(x,0),(x,y[0]),(255,255,255))
     y = np.flipud(h)
     return y

View File

@@ -77,8 +77,8 @@ class App:
             for (x0, y0), (x1, y1), good in zip(self.p0[:,0], self.p1[:,0], status[:,0]):
                 if good:
-                    cv.line(vis, (x0, y0), (x1, y1), (0, 128, 0))
-                cv.circle(vis, (x1, y1), 2, (red, green)[good], -1)
+                    cv.line(vis, (int(x0), int(y0)), (int(x1), int(y1)), (0, 128, 0))
+                cv.circle(vis, (int(x1), int(y1)), 2, (red, green)[good], -1)
             draw_str(vis, (20, 20), 'track count: %d' % len(self.p1))
             if self.use_ransac:
                 draw_str(vis, (20, 40), 'RANSAC')
@@ -86,7 +86,7 @@ class App:
             p = cv.goodFeaturesToTrack(frame_gray, **feature_params)
             if p is not None:
                 for x, y in p[:,0]:
-                    cv.circle(vis, (x, y), 2, green, -1)
+                    cv.circle(vis, (int(x), int(y)), 2, green, -1)
                 draw_str(vis, (20, 20), 'feature count: %d' % len(p))
             cv.imshow('lk_homography', vis)

View File

@@ -65,7 +65,7 @@ class App:
                 if len(tr) > self.track_len:
                     del tr[0]
                 new_tracks.append(tr)
-                cv.circle(vis, (x, y), 2, (0, 255, 0), -1)
+                cv.circle(vis, (int(x), int(y)), 2, (0, 255, 0), -1)
             self.tracks = new_tracks
             cv.polylines(vis, [np.int32(tr) for tr in self.tracks], False, (0, 255, 0))
             draw_str(vis, (20, 20), 'track count: %d' % len(self.tracks))

View File

@@ -30,7 +30,7 @@ def main():
     color = (0, 255, 0)
     cap = cv.VideoCapture(0)
-    cap.set(cv.CAP_PROP_AUTOFOCUS, False)  # Known bug: https://github.com/opencv/opencv/pull/5474
+    cap.set(cv.CAP_PROP_AUTOFOCUS, 0)  # Known bug: https://github.com/opencv/opencv/pull/5474
     cv.namedWindow("Video")
@@ -67,7 +67,7 @@ def main():
             break
         elif k == ord('g'):
             convert_rgb = not convert_rgb
-            cap.set(cv.CAP_PROP_CONVERT_RGB, convert_rgb)
+            cap.set(cv.CAP_PROP_CONVERT_RGB, 1 if convert_rgb else 0)
     print('Done')