Merge remote-tracking branch 'upstream/3.4' into merge-3.4

This commit is contained in:
Alexander Alekhin 2018-05-22 19:02:01 +03:00
commit 5e68f35500
40 changed files with 1566 additions and 354 deletions

View File

@ -279,6 +279,9 @@ OCV_OPTION(WITH_GPHOTO2 "Include gPhoto2 library support" ON
OCV_OPTION(WITH_LAPACK "Include Lapack library support" (NOT CV_DISABLE_OPTIMIZATION) IF (NOT ANDROID AND NOT IOS) )
OCV_OPTION(WITH_ITT "Include Intel ITT support" ON IF (NOT APPLE_FRAMEWORK) )
OCV_OPTION(WITH_PROTOBUF "Enable libprotobuf" ON )
OCV_OPTION(WITH_IMGCODEC_HDR "Include HDR support" ON)
OCV_OPTION(WITH_IMGCODEC_SUNRASTER "Include SUNRASTER support" ON)
OCV_OPTION(WITH_IMGCODEC_PXM "Include PNM (PBM,PGM,PPM) and PAM formats support" ON)
# OpenCV build components
# ===================================================
@ -1212,6 +1215,18 @@ if(WITH_GDCM OR HAVE_GDCM)
status(" GDCM:" HAVE_GDCM THEN "YES (ver ${GDCM_VERSION})" ELSE "NO")
endif()
if(WITH_IMGCODEC_HDR OR DEFINED HAVE_IMGCODEC_HDR)
status(" HDR:" HAVE_IMGCODEC_HDR THEN "YES" ELSE "NO")
endif()
if(WITH_IMGCODEC_SUNRASTER OR DEFINED HAVE_IMGCODEC_SUNRASTER)
status(" SUNRASTER:" HAVE_IMGCODEC_SUNRASTER THEN "YES" ELSE "NO")
endif()
if(WITH_IMGCODEC_PXM OR DEFINED HAVE_IMGCODEC_PXM)
status(" PXM:" HAVE_IMGCODEC_PXM THEN "YES" ELSE "NO")
endif()
# ========================== VIDEO IO ==========================
status("")
status(" Video I/O:")

View File

@ -203,6 +203,16 @@ if(CUDA_FOUND)
# cc1: warning: command line option '-Wsuggest-override' is valid for C++/ObjC++ but not for C
string(REPLACE "-Wsuggest-override" "" ${var} "${${var}}")
# issue: #11552 (from OpenCVCompilerOptions.cmake)
string(REGEX REPLACE "-Wimplicit-fallthrough(=[0-9]+)? " "" ${var} "${${var}}")
# removal of custom specified options
if(OPENCV_CUDA_NVCC_FILTEROUT_OPTIONS)
foreach(__flag ${OPENCV_CUDA_NVCC_FILTEROUT_OPTIONS})
string(REPLACE "${__flag}" "" ${var} "${${var}}")
endforeach()
endif()
endforeach()
endmacro()

View File

@ -27,7 +27,7 @@ if(NOT INF_ENGINE_ROOT_DIR OR NOT EXISTS "${INF_ENGINE_ROOT_DIR}/include/inferen
list(APPEND ie_root_paths "${INTEL_CVSDK_DIR}/inference_engine")
endif()
if(WITH_INF_ENGINE AND NOT ie_root_paths)
if(NOT ie_root_paths)
list(APPEND ie_root_paths "/opt/intel/deeplearning_deploymenttoolkit/deployment_tools/inference_engine")
endif()

View File

@ -252,3 +252,19 @@ if (WITH_GDCM)
set(GDCM_LIBRARIES gdcmMSFF) # GDCM does not set this variable for some reason
endif()
endif()
if(WITH_IMGCODEC_HDR)
set(HAVE_IMGCODEC_HDR ON)
elseif(DEFINED WITH_IMGCODEC_HDR)
set(HAVE_IMGCODEC_HDR OFF)
endif()
if(WITH_IMGCODEC_SUNRASTER)
set(HAVE_IMGCODEC_SUNRASTER ON)
elseif(DEFINED WITH_IMGCODEC_SUNRASTER)
set(HAVE_IMGCODEC_SUNRASTER OFF)
endif()
if(WITH_IMGCODEC_PXM)
set(HAVE_IMGCODEC_PXM ON)
elseif(DEFINED WITH_IMGCODEC_PXM)
set(HAVE_IMGCODEC_PXM OFF)
endif()

View File

@ -11,7 +11,7 @@ In this tutorial you will learn how to:
Theory
------
The *Canny Edge detector* was developed by John F. Canny in 1986. Also known to many as the
The *Canny Edge detector* @cite Canny86 was developed by John F. Canny in 1986. Also known to many as the
*optimal detector*, the Canny algorithm aims to satisfy three main criteria:
- **Low error rate:** Meaning a good detection of only existent edges.
- **Good localization:** The distance between edge pixels detected and real edge pixels have
@ -66,19 +66,33 @@ The *Canny Edge detector* was developed by John F. Canny in 1986. Also known to
Code
----
-# **What does this program do?**
@add_toggle_cpp
- The tutorial code's is shown lines below. You can also download it from
[here](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/ImgTrans/CannyDetector_Demo.cpp)
@include samples/cpp/tutorial_code/ImgTrans/CannyDetector_Demo.cpp
@end_toggle
@add_toggle_java
- The tutorial code's is shown lines below. You can also download it from
[here](https://github.com/opencv/opencv/tree/master/samples/java/tutorial_code/ImgTrans/canny_detector/CannyDetectorDemo.java)
@include samples/java/tutorial_code/ImgTrans/canny_detector/CannyDetectorDemo.java
@end_toggle
@add_toggle_python
- The tutorial code's is shown lines below. You can also download it from
[here](https://github.com/opencv/opencv/tree/master/samples/python/tutorial_code/ImgTrans/canny_detector/CannyDetector_Demo.py)
@include samples/python/tutorial_code/ImgTrans/canny_detector/CannyDetector_Demo.py
@end_toggle
- **What does this program do?**
- Asks the user to enter a numerical value to set the lower threshold for our *Canny Edge
Detector* (by means of a Trackbar).
- Applies the *Canny Detector* and generates a **mask** (bright lines representing the edges
on a black background).
- Applies the mask obtained on the original image and display it in a window.
-# The tutorial code's is shown lines below. You can also download it from
[here](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/ImgTrans/CannyDetector_Demo.cpp)
@include samples/cpp/tutorial_code/ImgTrans/CannyDetector_Demo.cpp
Explanation
-----------
Explanation (C++ code)
----------------------
-# Create some needed variables:
@snippet cpp/tutorial_code/ImgTrans/CannyDetector_Demo.cpp variables

View File

@ -45,61 +45,91 @@ Theory
Code
----
-# **What does this program do?**
- **What does this program do?**
- Loads an image
- Each second, apply 1 of 4 different remapping processes to the image and display them
indefinitely in a window.
- Wait for the user to exit the program
-# The tutorial code's is shown lines below. You can also download it from
@add_toggle_cpp
- The tutorial code's is shown lines below. You can also download it from
[here](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/ImgTrans/Remap_Demo.cpp)
@include samples/cpp/tutorial_code/ImgTrans/Remap_Demo.cpp
@end_toggle
@add_toggle_java
- The tutorial code's is shown lines below. You can also download it from
[here](https://github.com/opencv/opencv/tree/master/samples/java/tutorial_code/ImgTrans/remap/RemapDemo.java)
@include samples/java/tutorial_code/ImgTrans/remap/RemapDemo.java
@end_toggle
@add_toggle_python
- The tutorial code's is shown lines below. You can also download it from
[here](https://github.com/opencv/opencv/tree/master/samples/python/tutorial_code/ImgTrans/remap/Remap_Demo.py)
@include samples/python/tutorial_code/ImgTrans/remap/Remap_Demo.py
@end_toggle
Explanation
-----------
-# Create some variables we will use:
@code{.cpp}
Mat src, dst;
Mat map_x, map_y;
char* remap_window = "Remap demo";
int ind = 0;
@endcode
-# Load an image:
@code{.cpp}
src = imread( argv[1], 1 );
@endcode
-# Create the destination image and the two mapping matrices (for x and y )
@code{.cpp}
dst.create( src.size(), src.type() );
map_x.create( src.size(), CV_32FC1 );
map_y.create( src.size(), CV_32FC1 );
@endcode
-# Create a window to display results
@code{.cpp}
namedWindow( remap_window, WINDOW_AUTOSIZE );
@endcode
-# Establish a loop. Each 1000 ms we update our mapping matrices (*mat_x* and *mat_y*) and apply
- Load an image:
@add_toggle_cpp
@snippet samples/cpp/tutorial_code/ImgTrans/Remap_Demo.cpp Load
@end_toggle
@add_toggle_java
@snippet samples/java/tutorial_code/ImgTrans/remap/RemapDemo.java Load
@end_toggle
@add_toggle_python
@snippet samples/python/tutorial_code/ImgTrans/remap/Remap_Demo.py Load
@end_toggle
- Create the destination image and the two mapping matrices (for x and y )
@add_toggle_cpp
@snippet samples/cpp/tutorial_code/ImgTrans/Remap_Demo.cpp Create
@end_toggle
@add_toggle_java
@snippet samples/java/tutorial_code/ImgTrans/remap/RemapDemo.java Create
@end_toggle
@add_toggle_python
@snippet samples/python/tutorial_code/ImgTrans/remap/Remap_Demo.py Create
@end_toggle
- Create a window to display results
@add_toggle_cpp
@snippet samples/cpp/tutorial_code/ImgTrans/Remap_Demo.cpp Window
@end_toggle
@add_toggle_java
@snippet samples/java/tutorial_code/ImgTrans/remap/RemapDemo.java Window
@end_toggle
@add_toggle_python
@snippet samples/python/tutorial_code/ImgTrans/remap/Remap_Demo.py Window
@end_toggle
- Establish a loop. Each 1000 ms we update our mapping matrices (*mat_x* and *mat_y*) and apply
them to our source image:
@code{.cpp}
while( true )
{
/// Each 1 sec. Press ESC to exit the program
char c = (char)waitKey( 1000 );
if( c == 27 )
{ break; }
@add_toggle_cpp
@snippet samples/cpp/tutorial_code/ImgTrans/Remap_Demo.cpp Loop
@end_toggle
/// Update map_x & map_y. Then apply remap
update_map();
remap( src, dst, map_x, map_y, INTER_LINEAR, BORDER_CONSTANT, Scalar(0,0, 0) );
@add_toggle_java
@snippet samples/java/tutorial_code/ImgTrans/remap/RemapDemo.java Loop
@end_toggle
/// Display results
imshow( remap_window, dst );
}
@endcode
The function that applies the remapping is @ref cv::remap . We give the following arguments:
@add_toggle_python
@snippet samples/python/tutorial_code/ImgTrans/remap/Remap_Demo.py Loop
@end_toggle
- The function that applies the remapping is @ref cv::remap . We give the following arguments:
- **src**: Source image
- **dst**: Destination image of same size as *src*
- **map_x**: The mapping function in the x direction. It is equivalent to the first component
@ -112,9 +142,9 @@ Explanation
How do we update our mapping matrices *mat_x* and *mat_y*? Go on reading:
-# **Updating the mapping matrices:** We are going to perform 4 different mappings:
- **Updating the mapping matrices:** We are going to perform 4 different mappings:
-# Reduce the picture to half its size and will display it in the middle:
\f[h(i,j) = ( 2*i - src.cols/2 + 0.5, 2*j - src.rows/2 + 0.5)\f]
\f[h(i,j) = ( 2 \times i - src.cols/2 + 0.5, 2 \times j - src.rows/2 + 0.5)\f]
for all pairs \f$(i,j)\f$ such that: \f$\dfrac{src.cols}{4}<i<\dfrac{3 \cdot src.cols}{4}\f$ and
\f$\dfrac{src.rows}{4}<j<\dfrac{3 \cdot src.rows}{4}\f$
-# Turn the image upside down: \f$h( i, j ) = (i, src.rows - j)\f$
@ -123,41 +153,18 @@ Explanation
This is expressed in the following snippet. Here, *map_x* represents the first coordinate of
*h(i,j)* and *map_y* the second coordinate.
@code{.cpp}
for( int j = 0; j < src.rows; j++ )
{ for( int i = 0; i < src.cols; i++ )
{
switch( ind )
{
case 0:
if( i > src.cols*0.25 && i < src.cols*0.75 && j > src.rows*0.25 && j < src.rows*0.75 )
{
map_x.at<float>(j,i) = 2*( i - src.cols*0.25 ) + 0.5 ;
map_y.at<float>(j,i) = 2*( j - src.rows*0.25 ) + 0.5 ;
}
else
{ map_x.at<float>(j,i) = 0 ;
map_y.at<float>(j,i) = 0 ;
}
break;
case 1:
map_x.at<float>(j,i) = i ;
map_y.at<float>(j,i) = src.rows - j ;
break;
case 2:
map_x.at<float>(j,i) = src.cols - i ;
map_y.at<float>(j,i) = j ;
break;
case 3:
map_x.at<float>(j,i) = src.cols - i ;
map_y.at<float>(j,i) = src.rows - j ;
break;
} // end of switch
}
}
ind++;
}
@endcode
@add_toggle_cpp
@snippet samples/cpp/tutorial_code/ImgTrans/Remap_Demo.cpp Update
@end_toggle
@add_toggle_java
@snippet samples/java/tutorial_code/ImgTrans/remap/RemapDemo.java Update
@end_toggle
@add_toggle_python
@snippet samples/python/tutorial_code/ImgTrans/remap/Remap_Demo.py Update
@end_toggle
Result
------

View File

@ -15,6 +15,8 @@ In this section you will learn about the image processing (manipulation) functio
- @subpage tutorial_erosion_dilatation
*Languages:* C++, Java, Python
*Compatibility:* \> OpenCV 2.0
Author: Ana Huamán
@ -23,6 +25,8 @@ In this section you will learn about the image processing (manipulation) functio
- @subpage tutorial_opening_closing_hats
*Languages:* C++, Java, Python
*Compatibility:* \> OpenCV 2.0
*Author:* Ana Huamán
@ -61,6 +65,8 @@ In this section you will learn about the image processing (manipulation) functio
- @subpage tutorial_threshold
*Languages:* C++, Java, Python
*Compatibility:* \> OpenCV 2.0
*Author:* Ana Huamán
@ -69,6 +75,8 @@ In this section you will learn about the image processing (manipulation) functio
- @subpage tutorial_threshold_inRange
*Languages:* C++, Java, Python
*Compatibility:* \> OpenCV 2.0
*Author:* Rishiraj Surti
@ -117,6 +125,8 @@ In this section you will learn about the image processing (manipulation) functio
- @subpage tutorial_canny_detector
*Languages:* C++, Java, Python
*Compatibility:* \> OpenCV 2.0
*Author:* Ana Huamán
@ -145,6 +155,8 @@ In this section you will learn about the image processing (manipulation) functio
- @subpage tutorial_remap
*Languages:* C++, Java, Python
*Compatibility:* \> OpenCV 2.0
*Author:* Ana Huamán

View File

@ -96,43 +96,101 @@ Thresholding?
Code
----
@add_toggle_cpp
The tutorial code's is shown lines below. You can also download it from
[here](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/ImgProc/Threshold.cpp)
@include samples/cpp/tutorial_code/ImgProc/Threshold.cpp
@end_toggle
@add_toggle_java
The tutorial code's is shown lines below. You can also download it from
[here](https://github.com/opencv/opencv/tree/master/samples/java/tutorial_code/ImgProc/threshold/Threshold.java)
@include samples/java/tutorial_code/ImgProc/threshold/Threshold.java
@end_toggle
@add_toggle_python
The tutorial code's is shown lines below. You can also download it from
[here](https://github.com/opencv/opencv/tree/master/samples/python/tutorial_code/imgProc/threshold/threshold.py)
@include samples/python/tutorial_code/imgProc/threshold/threshold.py
@end_toggle
Explanation
-----------
-# Let's check the general structure of the program:
- Load an image. If it is BGR we convert it to Grayscale. For this, remember that we can use
Let's check the general structure of the program:
- Load an image. If it is BGR we convert it to Grayscale. For this, remember that we can use
the function @ref cv::cvtColor :
@snippet cpp/tutorial_code/ImgProc/Threshold.cpp load
- Create a window to display the result
@snippet cpp/tutorial_code/ImgProc/Threshold.cpp window
@add_toggle_cpp
@snippet samples/cpp/tutorial_code/ImgProc/Threshold.cpp load
@end_toggle
- Create \f$2\f$ trackbars for the user to enter user input:
@add_toggle_java
@snippet samples/java/tutorial_code/ImgProc/threshold/Threshold.java load
@end_toggle
- **Type of thresholding**: Binary, To Zero, etc...
- **Threshold value**
@snippet cpp/tutorial_code/ImgProc/Threshold.cpp trackbar
@add_toggle_python
@snippet samples/python/tutorial_code/imgProc/threshold/threshold.py load
@end_toggle
- Wait until the user enters the threshold value, the type of thresholding (or until the
program exits)
- Whenever the user changes the value of any of the Trackbars, the function *Threshold_Demo*
is called:
@snippet cpp/tutorial_code/ImgProc/Threshold.cpp Threshold_Demo
- Create a window to display the result
As you can see, the function @ref cv::threshold is invoked. We give \f$5\f$ parameters:
@add_toggle_cpp
@snippet samples/cpp/tutorial_code/ImgProc/Threshold.cpp window
@end_toggle
- *src_gray*: Our input image
- *dst*: Destination (output) image
- *threshold_value*: The \f$thresh\f$ value with respect to which the thresholding operation
is made
- *max_BINARY_value*: The value used with the Binary thresholding operations (to set the
chosen pixels)
- *threshold_type*: One of the \f$5\f$ thresholding operations. They are listed in the
comment section of the function above.
@add_toggle_java
@snippet samples/java/tutorial_code/ImgProc/threshold/Threshold.java window
@end_toggle
@add_toggle_python
@snippet samples/python/tutorial_code/imgProc/threshold/threshold.py window
@end_toggle
- Create \f$2\f$ trackbars for the user to enter user input:
- **Type of thresholding**: Binary, To Zero, etc...
- **Threshold value**
@add_toggle_cpp
@snippet samples/cpp/tutorial_code/ImgProc/Threshold.cpp trackbar
@end_toggle
@add_toggle_java
@snippet samples/java/tutorial_code/ImgProc/threshold/Threshold.java trackbar
@end_toggle
@add_toggle_python
@snippet samples/python/tutorial_code/imgProc/threshold/threshold.py trackbar
@end_toggle
- Wait until the user enters the threshold value, the type of thresholding (or until the
program exits)
- Whenever the user changes the value of any of the Trackbars, the function *Threshold_Demo*
(*update* in Java) is called:
@add_toggle_cpp
@snippet samples/cpp/tutorial_code/ImgProc/Threshold.cpp Threshold_Demo
@end_toggle
@add_toggle_java
@snippet samples/java/tutorial_code/ImgProc/threshold/Threshold.java Threshold_Demo
@end_toggle
@add_toggle_python
@snippet samples/python/tutorial_code/imgProc/threshold/threshold.py Threshold_Demo
@end_toggle
As you can see, the function @ref cv::threshold is invoked. We give \f$5\f$ parameters in C++ code:
- *src_gray*: Our input image
- *dst*: Destination (output) image
- *threshold_value*: The \f$thresh\f$ value with respect to which the thresholding operation
is made
- *max_BINARY_value*: The value used with the Binary thresholding operations (to set the
chosen pixels)
- *threshold_type*: One of the \f$5\f$ thresholding operations. They are listed in the
comment section of the function above.
Results
-------

Binary file not shown.

After

Width:  |  Height:  |  Size: 20 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 15 KiB

View File

@ -1,56 +1,173 @@
Thresholding Operations using inRange {#tutorial_threshold_inRange}
=============================
=====================================
Goal
----
In this tutorial you will learn how to:
- Perform basic thresholding operations using OpenCV function @ref cv::inRange
- Detect an object based on the range of pixel values it has
- Perform basic thresholding operations using OpenCV @ref cv::inRange function.
- Detect an object based on the range of pixel values in the HSV colorspace.
Theory
-----------
- In the previous tutorial, we learnt how perform thresholding using @ref cv::threshold function.
------
- In the previous tutorial, we learnt how to perform thresholding using @ref cv::threshold function.
- In this tutorial, we will learn how to do it using @ref cv::inRange function.
- The concept remains same, but now we add a range of pixel values we need.
- The concept remains the same, but now we add a range of pixel values we need.
HSV colorspace
--------------
<a href="https://en.wikipedia.org/wiki/HSL_and_HSV">HSV</a> (hue, saturation, value) colorspace
is a model to represent the colorspace similar to the RGB color model. Since the hue channel
models the color type, it is very useful in image processing tasks that need to segment objects
based on its color. Variation of the saturation goes from unsaturated to represent shades of gray and
fully saturated (no white component). Value channel describes the brightness or the intensity of the
color. Next image shows the HSV cylinder.
![By SharkDderivative work: SharkD [CC BY-SA 3.0 or GFDL], via Wikimedia Commons](images/Threshold_inRange_HSV_colorspace.jpg)
Since colors in the RGB colorspace are coded using the three channels, it is more difficult to segment
an object in the image based on its color.
![By SharkD [GFDL or CC BY-SA 4.0], from Wikimedia Commons](images/Threshold_inRange_RGB_colorspace.jpg)
Formulas used to convert from one colorspace to another colorspace using @ref cv::cvtColor function
are described in @ref imgproc_color_conversions
Code
----
@add_toggle_cpp
The tutorial code's is shown lines below. You can also download it from
[here](https://github.com/opencv/opencv/tree/master/samples/cpp/tutorial_code/ImgProc/Threshold_inRange.cpp)
@include samples/cpp/tutorial_code/ImgProc/Threshold_inRange.cpp
@end_toggle
@add_toggle_java
The tutorial code's is shown lines below. You can also download it from
[here](https://github.com/opencv/opencv/tree/master/samples/java/tutorial_code/ImgProc/threshold_inRange/ThresholdInRange.java)
@include samples/java/tutorial_code/ImgProc/threshold_inRange/ThresholdInRange.java
@end_toggle
@add_toggle_python
The tutorial code's is shown lines below. You can also download it from
[here](https://github.com/opencv/opencv/tree/master/samples/python/tutorial_code/imgProc/threshold_inRange/threshold_inRange.py)
@include samples/python/tutorial_code/imgProc/threshold_inRange/threshold_inRange.py
@end_toggle
Explanation
-----------
-# Let's check the general structure of the program:
- Create two Matrix elements to store the frames
@snippet samples/cpp/tutorial_code/ImgProc/Threshold_inRange.cpp mat
- Capture the video stream from default capturing device.
@snippet samples/cpp/tutorial_code/ImgProc/Threshold_inRange.cpp cap
- Create a window to display the default frame and the threshold frame.
@snippet samples/cpp/tutorial_code/ImgProc/Threshold_inRange.cpp window
- Create trackbars to set the range of RGB values
@snippet samples/cpp/tutorial_code/ImgProc/Threshold_inRange.cpp trackbar
- Until the user want the program to exit do the following
@snippet samples/cpp/tutorial_code/ImgProc/Threshold_inRange.cpp while
- Show the images
@snippet samples/cpp/tutorial_code/ImgProc/Threshold_inRange.cpp show
- For a trackbar which controls the lower range, say for example Red value:
@snippet samples/cpp/tutorial_code/ImgProc/Threshold_inRange.cpp low
- For a trackbar which controls the upper range, say for example Red value:
@snippet samples/cpp/tutorial_code/ImgProc/Threshold_inRange.cpp high
- It is necessary to find the maximum and minimum value to avoid discrepancies such as
the high value of threshold becoming less the low value.
Let's check the general structure of the program:
- Capture the video stream from default or supplied capturing device.
@add_toggle_cpp
@snippet samples/cpp/tutorial_code/ImgProc/Threshold_inRange.cpp cap
@end_toggle
@add_toggle_java
@snippet samples/java/tutorial_code/ImgProc/threshold_inRange/ThresholdInRange.java cap
@end_toggle
@add_toggle_python
@snippet samples/python/tutorial_code/imgProc/threshold_inRange/threshold_inRange.py cap
@end_toggle
- Create a window to display the default frame and the threshold frame.
@add_toggle_cpp
@snippet samples/cpp/tutorial_code/ImgProc/Threshold_inRange.cpp window
@end_toggle
@add_toggle_java
@snippet samples/java/tutorial_code/ImgProc/threshold_inRange/ThresholdInRange.java window
@end_toggle
@add_toggle_python
@snippet samples/python/tutorial_code/imgProc/threshold_inRange/threshold_inRange.py window
@end_toggle
- Create the trackbars to set the range of HSV values
@add_toggle_cpp
@snippet samples/cpp/tutorial_code/ImgProc/Threshold_inRange.cpp trackbar
@end_toggle
@add_toggle_java
@snippet samples/java/tutorial_code/ImgProc/threshold_inRange/ThresholdInRange.java trackbar
@end_toggle
@add_toggle_python
@snippet samples/python/tutorial_code/imgProc/threshold_inRange/threshold_inRange.py trackbar
@end_toggle
- Until the user want the program to exit do the following
@add_toggle_cpp
@snippet samples/cpp/tutorial_code/ImgProc/Threshold_inRange.cpp while
@end_toggle
@add_toggle_java
@snippet samples/java/tutorial_code/ImgProc/threshold_inRange/ThresholdInRange.java while
@end_toggle
@add_toggle_python
@snippet samples/python/tutorial_code/imgProc/threshold_inRange/threshold_inRange.py while
@end_toggle
- Show the images
@add_toggle_cpp
@snippet samples/cpp/tutorial_code/ImgProc/Threshold_inRange.cpp show
@end_toggle
@add_toggle_java
@snippet samples/java/tutorial_code/ImgProc/threshold_inRange/ThresholdInRange.java show
@end_toggle
@add_toggle_python
@snippet samples/python/tutorial_code/imgProc/threshold_inRange/threshold_inRange.py show
@end_toggle
- For a trackbar which controls the lower range, say for example hue value:
@add_toggle_cpp
@snippet samples/cpp/tutorial_code/ImgProc/Threshold_inRange.cpp low
@end_toggle
@add_toggle_java
@snippet samples/java/tutorial_code/ImgProc/threshold_inRange/ThresholdInRange.java low
@end_toggle
@add_toggle_python
@snippet samples/python/tutorial_code/imgProc/threshold_inRange/threshold_inRange.py low
@end_toggle
@snippet samples/cpp/tutorial_code/ImgProc/Threshold_inRange.cpp low
- For a trackbar which controls the upper range, say for example hue value:
@add_toggle_cpp
@snippet samples/cpp/tutorial_code/ImgProc/Threshold_inRange.cpp high
@end_toggle
@add_toggle_java
@snippet samples/java/tutorial_code/ImgProc/threshold_inRange/ThresholdInRange.java high
@end_toggle
@add_toggle_python
@snippet samples/python/tutorial_code/imgProc/threshold_inRange/threshold_inRange.py high
@end_toggle
- It is necessary to find the maximum and minimum value to avoid discrepancies such as
the high value of threshold becoming less than the low value.
Results
-------
-# After compiling this program, run it. The program will open two windows
- After compiling this program, run it. The program will open two windows
-# As you set the RGB range values from the trackbar, the resulting frame will be visible in the other window.
- As you set the range values from the trackbar, the resulting frame will be visible in the other window.
![](images/Threshold_inRange_Tutorial_Result_input.jpeg)
![](images/Threshold_inRange_Tutorial_Result_output.jpeg)

View File

@ -80,7 +80,7 @@ else()
set(sources_options EXCLUDE_OPENCL)
endif()
if(WITH_INF_ENGINE AND HAVE_INF_ENGINE)
if(HAVE_INF_ENGINE)
add_definitions(-DHAVE_INF_ENGINE=1)
list(APPEND include_dirs ${INF_ENGINE_INCLUDE_DIRS})
list(APPEND libs ${INF_ENGINE_LIBRARIES})

View File

@ -192,6 +192,10 @@ void InfEngineBackendNet::getName(char*, size_t) noexcept
{
}
void InfEngineBackendNet::getName(char*, size_t) const noexcept
{
}
size_t InfEngineBackendNet::layerCount() noexcept
{
return layers.size();

View File

@ -46,7 +46,9 @@ public:
virtual InferenceEngine::InputInfo::Ptr getInput(const std::string &inputName) noexcept CV_OVERRIDE;
virtual void getName(char *pName, size_t len) noexcept CV_OVERRIDE;
virtual void getName(char *pName, size_t len) noexcept;
virtual void getName(char *pName, size_t len) const noexcept;
virtual size_t layerCount() noexcept CV_OVERRIDE;

View File

@ -521,7 +521,7 @@ TEST(Test_TensorFlow, EAST_text_detection)
Mat geometry = outs[1];
normAssert(scores, blobFromNPY(refScoresPath), "scores");
normAssert(geometry, blobFromNPY(refGeometryPath), "geometry", 5e-5, 1e-3);
normAssert(geometry, blobFromNPY(refGeometryPath), "geometry", 1e-4, 3e-3);
LayerFactory::unregisterLayer("ResizeBilinear");
}

View File

@ -60,6 +60,18 @@ if(HAVE_GDAL)
list(APPEND GRFMT_LIBS ${GDAL_LIBRARY})
endif()
if(HAVE_IMGCODEC_HDR)
add_definitions(-DHAVE_IMGCODEC_HDR)
endif()
if(HAVE_IMGCODEC_SUNRASTER)
add_definitions(-DHAVE_IMGCODEC_SUNRASTER)
endif()
if(HAVE_IMGCODEC_PXM)
add_definitions(-DHAVE_IMGCODEC_PXM)
endif()
file(GLOB grfmt_hdrs ${CMAKE_CURRENT_LIST_DIR}/src/grfmt*.hpp)
file(GLOB grfmt_srcs ${CMAKE_CURRENT_LIST_DIR}/src/grfmt*.cpp)

View File

@ -44,6 +44,8 @@
#include "grfmt_hdr.hpp"
#include "rgbe.hpp"
#ifdef HAVE_IMGCODEC_HDR
namespace cv
{
@ -166,3 +168,5 @@ bool HdrEncoder::isFormatSupported( int depth ) const {
}
}
#endif // HAVE_IMGCODEC_HDR

View File

@ -45,6 +45,8 @@
#include "grfmt_base.hpp"
#ifdef HAVE_IMGCODEC_HDR
namespace cv
{
@ -85,4 +87,6 @@ protected:
}
#endif // HAVE_IMGCODEC_HDR
#endif/*_GRFMT_HDR_H_*/

View File

@ -46,10 +46,13 @@
//
//M*/
#include "precomp.hpp"
#ifdef HAVE_IMGCODEC_PXM
#include <cerrno>
#include "precomp.hpp"
#include "utils.hpp"
#include "grfmt_pam.hpp"
@ -720,3 +723,5 @@ bool PAMEncoder::write( const Mat& img, const std::vector<int>& params )
}
}
#endif

View File

@ -53,6 +53,8 @@
#ifndef _OPENCV_PAM_HPP_
#define _OPENCV_PAM_HPP_
#ifdef HAVE_IMGCODEC_PXM
#include "grfmt_base.hpp"
#include "bitstrm.hpp"
@ -96,4 +98,6 @@ public:
}
#endif
#endif /* _OPENCV_PAM_HPP_ */

View File

@ -45,6 +45,8 @@
#include "grfmt_pxm.hpp"
#include <iostream>
#ifdef HAVE_IMGCODEC_PXM
namespace cv
{
@ -619,3 +621,5 @@ bool PxMEncoder::write(const Mat& img, const std::vector<int>& params)
}
}
#endif // HAVE_IMGCODEC_PXM

View File

@ -46,6 +46,8 @@
#include "grfmt_base.hpp"
#include "bitstrm.hpp"
#ifdef HAVE_IMGCODEC_PXM
namespace cv
{
@ -101,4 +103,6 @@ public:
}
#endif // HAVE_IMGCODEC_PXM
#endif/*_GRFMT_PxM_H_*/

View File

@ -43,6 +43,8 @@
#include "precomp.hpp"
#include "grfmt_sunras.hpp"
#ifdef HAVE_IMGCODEC_SUNRASTER
namespace cv
{
@ -427,3 +429,5 @@ bool SunRasterEncoder::write( const Mat& img, const std::vector<int>& )
}
}
#endif // HAVE_IMGCODEC_SUNRASTER

View File

@ -45,6 +45,8 @@
#include "grfmt_base.hpp"
#ifdef HAVE_IMGCODEC_SUNRASTER
namespace cv
{
@ -102,4 +104,6 @@ public:
}
#endif // HAVE_IMGCODEC_SUNRASTER
#endif/*_GRFMT_SUNRAS_H_*/

View File

@ -131,8 +131,10 @@ struct ImageCodecInitializer
decoders.push_back( makePtr<BmpDecoder>() );
encoders.push_back( makePtr<BmpEncoder>() );
#ifdef HAVE_IMGCODEC_HDR
decoders.push_back( makePtr<HdrDecoder>() );
encoders.push_back( makePtr<HdrEncoder>() );
#endif
#ifdef HAVE_JPEG
decoders.push_back( makePtr<JpegDecoder>() );
encoders.push_back( makePtr<JpegEncoder>() );
@ -141,13 +143,19 @@ struct ImageCodecInitializer
decoders.push_back( makePtr<WebPDecoder>() );
encoders.push_back( makePtr<WebPEncoder>() );
#endif
#ifdef HAVE_IMGCODEC_SUNRASTER
decoders.push_back( makePtr<SunRasterDecoder>() );
encoders.push_back( makePtr<SunRasterEncoder>() );
#endif
#ifdef HAVE_IMGCODEC_PXM
decoders.push_back( makePtr<PxMDecoder>() );
encoders.push_back( makePtr<PxMEncoder>(PXM_TYPE_AUTO) );
encoders.push_back( makePtr<PxMEncoder>(PXM_TYPE_PBM) );
encoders.push_back( makePtr<PxMEncoder>(PXM_TYPE_PGM) );
encoders.push_back( makePtr<PxMEncoder>(PXM_TYPE_PPM) );
decoders.push_back( makePtr<PAMDecoder>() );
encoders.push_back( makePtr<PAMEncoder>() );
#endif
#ifdef HAVE_TIFF
decoders.push_back( makePtr<TiffDecoder>() );
encoders.push_back( makePtr<TiffEncoder>() );
@ -172,8 +180,6 @@ struct ImageCodecInitializer
/// Attach the GDAL Decoder
decoders.push_back( makePtr<GdalDecoder>() );
#endif/*HAVE_GDAL*/
decoders.push_back( makePtr<PAMDecoder>() );
encoders.push_back( makePtr<PAMEncoder>() );
}
std::vector<ImageDecoder> decoders;
@ -716,13 +722,10 @@ bool imwrite( const String& filename, InputArray _img,
{
CV_TRACE_FUNCTION();
std::vector<Mat> img_vec;
//Did we get a Mat or a vector of Mats?
if (_img.isMat() || _img.isUMat())
img_vec.push_back(_img.getMat());
else if (_img.isMatVector() || _img.isUMatVector())
if (_img.isMatVector() || _img.isUMatVector())
_img.getMatVector(img_vec);
else
CV_Error(Error::StsBadArg, "Unknown/unsupported input encountered");
img_vec.push_back(_img.getMat());
CV_Assert(!img_vec.empty());
return imwrite_(filename, img_vec, params, false);

View File

@ -89,7 +89,9 @@ const string all_images[] =
"readwrite/ordinary.bmp",
"readwrite/rle8.bmp",
"readwrite/test_1_c1.jpg",
#ifdef HAVE_IMGCODEC_HDR
"readwrite/rle.hdr"
#endif
};
const int basic_modes[] =
@ -207,11 +209,13 @@ const string all_exts[] =
".jpg",
#endif
".bmp",
#ifdef HAVE_IMGCODEC_PXM
".pam",
".ppm",
".pgm",
".pbm",
".pnm"
#endif
};
vector<Size> all_sizes()
@ -227,6 +231,7 @@ INSTANTIATE_TEST_CASE_P(All, Imgcodecs_ExtSize,
testing::ValuesIn(all_exts),
testing::ValuesIn(all_sizes())));
#ifdef HAVE_IMGCODEC_PXM
typedef testing::TestWithParam<bool> Imgcodecs_pbm;
TEST_P(Imgcodecs_pbm, write_read)
{
@ -259,6 +264,7 @@ TEST_P(Imgcodecs_pbm, write_read)
}
INSTANTIATE_TEST_CASE_P(All, Imgcodecs_pbm, testing::Bool());
#endif
//==================================================================================================
@ -274,6 +280,7 @@ TEST(Imgcodecs_Bmp, read_rle8)
EXPECT_PRED_FORMAT2(cvtest::MatComparator(0, 0), rle, ord);
}
#ifdef HAVE_IMGCODEC_HDR
TEST(Imgcodecs_Hdr, regression)
{
string folder = string(cvtest::TS::ptr()->get_data_path()) + "/readwrite/";
@ -299,7 +306,9 @@ TEST(Imgcodecs_Hdr, regression)
}
remove(tmp_file_name.c_str());
}
#endif
#ifdef HAVE_IMGCODEC_PXM
TEST(Imgcodecs_Pam, read_write)
{
string folder = string(cvtest::TS::ptr()->get_data_path()) + "readwrite/";
@ -326,5 +335,20 @@ TEST(Imgcodecs_Pam, read_write)
remove(writefile.c_str());
remove(writefile_no_param.c_str());
}
#endif
TEST(Imgcodecs, write_parameter_type)
{
cv::Mat m(10, 10, CV_8UC1, cv::Scalar::all(0));
cv::Mat1b m_type = cv::Mat1b::zeros(10, 10);
string tmp_file = cv::tempfile(".bmp");
EXPECT_NO_THROW(cv::imwrite(tmp_file, cv::Mat(m * 2))) << "* Failed with cv::Mat";
EXPECT_NO_THROW(cv::imwrite(tmp_file, m * 2)) << "* Failed with cv::MatExpr";
EXPECT_NO_THROW(cv::imwrite(tmp_file, m_type)) << "* Failed with cv::Mat_";
EXPECT_NO_THROW(cv::imwrite(tmp_file, m_type * 2)) << "* Failed with cv::MatExpr(Mat_)";
cv::Matx<uchar, 10, 10> matx;
EXPECT_NO_THROW(cv::imwrite(tmp_file, matx)) << "* Failed with cv::Matx";
EXPECT_EQ(0, remove(tmp_file.c_str()));
}
}} // namespace

View File

@ -112,8 +112,12 @@ const string exts[] = {
"exr",
#endif
"bmp",
#ifdef HAVE_IMGCODEC_PXM
"ppm",
"ras"
#endif
#ifdef HAVE_IMGCODEC_SUNRASTER
"ras",
#endif
};
INSTANTIATE_TEST_CASE_P(imgcodecs, Imgcodecs_Image, testing::ValuesIn(exts));

View File

@ -56,7 +56,7 @@ if __name__ == "__main__":
log.setLevel(logging.DEBUG if args.verbose else logging.INFO)
test_args = [a for a in other_args if a.startswith("--perf_") or a.startswith("--gtest_")]
test_args = [a for a in other_args if a.startswith("--perf_") or a.startswith("--test_") or a.startswith("--gtest_")]
bad_args = [a for a in other_args if a not in test_args]
if len(bad_args) > 0:
log.error("Error: Bad arguments: %s", bad_args)

View File

@ -7,8 +7,10 @@
#include "opencv2/imgproc.hpp"
#include "opencv2/imgcodecs.hpp"
#include "opencv2/highgui.hpp"
#include <iostream>
using namespace cv;
using std::cout;
/// Global variables
@ -16,7 +18,7 @@ int threshold_value = 0;
int threshold_type = 3;
int const max_value = 255;
int const max_type = 4;
int const max_BINARY_value = 255;
int const max_binary_value = 255;
Mat src, src_gray, dst;
const char* window_name = "Threshold Demo";
@ -24,69 +26,62 @@ const char* window_name = "Threshold Demo";
const char* trackbar_type = "Type: \n 0: Binary \n 1: Binary Inverted \n 2: Truncate \n 3: To Zero \n 4: To Zero Inverted";
const char* trackbar_value = "Value";
/// Function headers
void Threshold_Demo( int, void* );
//![Threshold_Demo]
/**
* @function Threshold_Demo
*/
static void Threshold_Demo( int, void* )
{
/* 0: Binary
1: Binary Inverted
2: Threshold Truncated
3: Threshold to Zero
4: Threshold to Zero Inverted
*/
threshold( src_gray, dst, threshold_value, max_binary_value, threshold_type );
imshow( window_name, dst );
}
//![Threshold_Demo]
/**
* @function main
*/
int main( int argc, char** argv )
{
//! [load]
String imageName("../data/stuff.jpg"); // by default
if (argc > 1)
{
imageName = argv[1];
}
src = imread( imageName, IMREAD_COLOR ); // Load an image
if( src.empty() )
{ return -1; }
cvtColor( src, src_gray, COLOR_BGR2GRAY ); // Convert the image to Gray
//! [load]
//! [window]
namedWindow( window_name, WINDOW_AUTOSIZE ); // Create a window to display results
//! [window]
//! [trackbar]
createTrackbar( trackbar_type,
window_name, &threshold_type,
max_type, Threshold_Demo ); // Create Trackbar to choose type of Threshold
createTrackbar( trackbar_value,
window_name, &threshold_value,
max_value, Threshold_Demo ); // Create Trackbar to choose Threshold value
//! [trackbar]
Threshold_Demo( 0, 0 ); // Call the function to initialize
/// Wait until user finishes program
for(;;)
//! [load]
String imageName("../data/stuff.jpg"); // by default
if (argc > 1)
{
char c = (char)waitKey( 20 );
if( c == 27 )
{ break; }
imageName = argv[1];
}
src = imread( imageName, IMREAD_COLOR ); // Load an image
if (src.empty())
{
cout << "Cannot read image: " << imageName << std::endl;
return -1;
}
cvtColor( src, src_gray, COLOR_BGR2GRAY ); // Convert the image to Gray
//! [load]
//! [window]
namedWindow( window_name, WINDOW_AUTOSIZE ); // Create a window to display results
//! [window]
//! [trackbar]
createTrackbar( trackbar_type,
window_name, &threshold_type,
max_type, Threshold_Demo ); // Create Trackbar to choose type of Threshold
createTrackbar( trackbar_value,
window_name, &threshold_value,
max_value, Threshold_Demo ); // Create Trackbar to choose Threshold value
//! [trackbar]
Threshold_Demo( 0, 0 ); // Call the function to initialize
/// Wait until user finishes program
waitKey();
return 0;
}
//![Threshold_Demo]
/**
* @function Threshold_Demo
*/
void Threshold_Demo( int, void* )
{
/* 0: Binary
1: Binary Inverted
2: Threshold Truncated
3: Threshold to Zero
4: Threshold to Zero Inverted
*/
threshold( src_gray, dst, threshold_value, max_BINARY_value,threshold_type );
imshow( window_name, dst );
}
//![Threshold_Demo]

View File

@ -1,102 +1,104 @@
#include "opencv2/imgproc.hpp"
#include "opencv2/highgui.hpp"
#include <iostream>
#include <stdlib.h>
using namespace std;
using namespace cv;
/** Function Headers */
void on_low_r_thresh_trackbar(int, void *);
void on_high_r_thresh_trackbar(int, void *);
void on_low_g_thresh_trackbar(int, void *);
void on_high_g_thresh_trackbar(int, void *);
void on_low_b_thresh_trackbar(int, void *);
void on_high_b_thresh_trackbar(int, void *);
/** Global Variables */
int low_r=30, low_g=30, low_b=30;
int high_r=100, high_g=100, high_b=100;
const int max_value_H = 360/2;
const int max_value = 255;
const String window_capture_name = "Video Capture";
const String window_detection_name = "Object Detection";
int low_H = 0, low_S = 0, low_V = 0;
int high_H = max_value_H, high_S = max_value, high_V = max_value;
/** @function main */
int main()
//! [low]
static void on_low_H_thresh_trackbar(int, void *)
{
low_H = min(high_H-1, low_H);
setTrackbarPos("Low H", window_detection_name, low_H);
}
//! [low]
//! [high]
static void on_high_H_thresh_trackbar(int, void *)
{
high_H = max(high_H, low_H+1);
setTrackbarPos("High H", window_detection_name, high_H);
}
//! [high]
static void on_low_S_thresh_trackbar(int, void *)
{
low_S = min(high_S-1, low_S);
setTrackbarPos("Low S", window_detection_name, low_S);
}
static void on_high_S_thresh_trackbar(int, void *)
{
high_S = max(high_S, low_S+1);
setTrackbarPos("High S", window_detection_name, high_S);
}
static void on_low_V_thresh_trackbar(int, void *)
{
low_V = min(high_V-1, low_V);
setTrackbarPos("Low V", window_detection_name, low_V);
}
static void on_high_V_thresh_trackbar(int, void *)
{
high_V = max(high_V, low_V+1);
setTrackbarPos("High V", window_detection_name, high_V);
}
int main(int argc, char* argv[])
{
//! [mat]
Mat frame, frame_threshold;
//! [mat]
//! [cap]
VideoCapture cap(0);
VideoCapture cap(argc > 1 ? atoi(argv[1]) : 0);
//! [cap]
//! [window]
namedWindow("Video Capture", WINDOW_NORMAL);
namedWindow("Object Detection", WINDOW_NORMAL);
namedWindow(window_capture_name);
namedWindow(window_detection_name);
//! [window]
//! [trackbar]
//-- Trackbars to set thresholds for RGB values
createTrackbar("Low R","Object Detection", &low_r, 255, on_low_r_thresh_trackbar);
createTrackbar("High R","Object Detection", &high_r, 255, on_high_r_thresh_trackbar);
createTrackbar("Low G","Object Detection", &low_g, 255, on_low_g_thresh_trackbar);
createTrackbar("High G","Object Detection", &high_g, 255, on_high_g_thresh_trackbar);
createTrackbar("Low B","Object Detection", &low_b, 255, on_low_b_thresh_trackbar);
createTrackbar("High B","Object Detection", &high_b, 255, on_high_b_thresh_trackbar);
// Trackbars to set thresholds for HSV values
createTrackbar("Low H", window_detection_name, &low_H, max_value_H, on_low_H_thresh_trackbar);
createTrackbar("High H", window_detection_name, &high_H, max_value_H, on_high_H_thresh_trackbar);
createTrackbar("Low S", window_detection_name, &low_S, max_value, on_low_S_thresh_trackbar);
createTrackbar("High S", window_detection_name, &high_S, max_value, on_high_S_thresh_trackbar);
createTrackbar("Low V", window_detection_name, &low_V, max_value, on_low_V_thresh_trackbar);
createTrackbar("High V", window_detection_name, &high_V, max_value, on_high_V_thresh_trackbar);
//! [trackbar]
while((char)waitKey(1)!='q'){
Mat frame, frame_HSV, frame_threshold;
while (true) {
//! [while]
cap>>frame;
cap >> frame;
if(frame.empty())
{
break;
//-- Detect the object based on RGB Range Values
inRange(frame,Scalar(low_b,low_g,low_r), Scalar(high_b,high_g,high_r),frame_threshold);
}
// Convert from BGR to HSV colorspace
cvtColor(frame, frame_HSV, COLOR_BGR2HSV);
// Detect the object based on HSV Range Values
inRange(frame_HSV, Scalar(low_H, low_S, low_V), Scalar(high_H, high_S, high_V), frame_threshold);
//! [while]
//! [show]
//-- Show the frames
imshow("Video Capture",frame);
imshow("Object Detection",frame_threshold);
// Show the frames
imshow(window_capture_name, frame);
imshow(window_detection_name, frame_threshold);
//! [show]
char key = (char) waitKey(30);
if (key == 'q' || key == 27)
{
break;
}
}
return 0;
}
//! [low]
/** @function on_low_r_thresh_trackbar */
void on_low_r_thresh_trackbar(int, void *)
{
low_r = min(high_r-1, low_r);
setTrackbarPos("Low R","Object Detection", low_r);
}
//! [low]
//! [high]
/** @function on_high_r_thresh_trackbar */
void on_high_r_thresh_trackbar(int, void *)
{
high_r = max(high_r, low_r+1);
setTrackbarPos("High R", "Object Detection", high_r);
}
//![high]
/** @function on_low_g_thresh_trackbar */
void on_low_g_thresh_trackbar(int, void *)
{
low_g = min(high_g-1, low_g);
setTrackbarPos("Low G","Object Detection", low_g);
}
/** @function on_high_g_thresh_trackbar */
void on_high_g_thresh_trackbar(int, void *)
{
high_g = max(high_g, low_g+1);
setTrackbarPos("High G", "Object Detection", high_g);
}
/** @function on_low_b_thresh_trackbar */
void on_low_b_thresh_trackbar(int, void *)
{
low_b= min(high_b-1, low_b);
setTrackbarPos("Low B","Object Detection", low_b);
}
/** @function on_high_b_thresh_trackbar */
void on_high_b_thresh_trackbar(int, void *)
{
high_b = max(high_b, low_b+1);
setTrackbarPos("High B", "Object Detection", high_b);
}

View File

@ -14,11 +14,10 @@ using namespace cv;
Mat src, src_gray;
Mat dst, detected_edges;
int edgeThresh = 1;
int lowThreshold;
int const max_lowThreshold = 100;
int ratio = 3;
int kernel_size = 3;
int lowThreshold = 0;
const int max_lowThreshold = 100;
const int ratio = 3;
const int kernel_size = 3;
const char* window_name = "Edge Map";
//![variables]

View File

@ -11,90 +11,104 @@
using namespace cv;
/// Global variables
Mat src, dst;
Mat map_x, map_y;
const char* remap_window = "Remap demo";
int ind = 0;
/// Function Headers
void update_map( void );
void update_map( int &ind, Mat &map_x, Mat &map_y );
/**
* @function main
*/
int main(int argc, const char** argv)
{
/// Load the image
CommandLineParser parser(argc, argv, "{@image |../data/chicky_512.png|input image name}");
std::string filename = parser.get<std::string>(0);
src = imread( filename, IMREAD_COLOR );
CommandLineParser parser(argc, argv, "{@image |../data/chicky_512.png|input image name}");
std::string filename = parser.get<std::string>(0);
//! [Load]
/// Load the image
Mat src = imread( filename, IMREAD_COLOR );
if (src.empty())
{
std::cout << "Cannot read image: " << filename << std::endl;
return -1;
}
//! [Load]
/// Create dst, map_x and map_y with the same size as src:
dst.create( src.size(), src.type() );
map_x.create( src.size(), CV_32FC1 );
map_y.create( src.size(), CV_32FC1 );
//! [Create]
/// Create dst, map_x and map_y with the same size as src:
Mat dst(src.size(), src.type());
Mat map_x(src.size(), CV_32FC1);
Mat map_y(src.size(), CV_32FC1);
//! [Create]
/// Create window
namedWindow( remap_window, WINDOW_AUTOSIZE );
//! [Window]
/// Create window
const char* remap_window = "Remap demo";
namedWindow( remap_window, WINDOW_AUTOSIZE );
//! [Window]
/// Loop
for(;;)
{
/// Each 1 sec. Press ESC to exit the program
char c = (char)waitKey( 1000 );
//! [Loop]
/// Index to switch between the remap modes
int ind = 0;
for(;;)
{
/// Update map_x & map_y. Then apply remap
update_map(ind, map_x, map_y);
remap( src, dst, map_x, map_y, INTER_LINEAR, BORDER_CONSTANT, Scalar(0, 0, 0) );
if( c == 27 )
{ break; }
/// Display results
imshow( remap_window, dst );
/// Update map_x & map_y. Then apply remap
update_map();
remap( src, dst, map_x, map_y, INTER_LINEAR, BORDER_CONSTANT, Scalar(0, 0, 0) );
// Display results
imshow( remap_window, dst );
}
return 0;
/// Each 1 sec. Press ESC to exit the program
char c = (char)waitKey( 1000 );
if( c == 27 )
{
break;
}
}
//! [Loop]
return 0;
}
/**
* @function update_map
* @brief Fill the map_x and map_y matrices with 4 types of mappings
*/
void update_map( void )
//! [Update]
void update_map( int &ind, Mat &map_x, Mat &map_y )
{
ind = ind%4;
for( int j = 0; j < src.rows; j++ )
{ for( int i = 0; i < src.cols; i++ )
{
switch( ind )
{
case 0:
if( i > src.cols*0.25 && i < src.cols*0.75 && j > src.rows*0.25 && j < src.rows*0.75 )
{
map_x.at<float>(j,i) = 2*( i - src.cols*0.25f ) + 0.5f ;
map_y.at<float>(j,i) = 2*( j - src.rows*0.25f ) + 0.5f ;
}
else
{ map_x.at<float>(j,i) = 0 ;
map_y.at<float>(j,i) = 0 ;
}
break;
case 1:
map_x.at<float>(j,i) = (float)i ;
map_y.at<float>(j,i) = (float)(src.rows - j) ;
break;
case 2:
map_x.at<float>(j,i) = (float)(src.cols - i) ;
map_y.at<float>(j,i) = (float)j ;
break;
case 3:
map_x.at<float>(j,i) = (float)(src.cols - i) ;
map_y.at<float>(j,i) = (float)(src.rows - j) ;
break;
} // end of switch
}
for( int i = 0; i < map_x.rows; i++ )
{
for( int j = 0; j < map_x.cols; j++ )
{
switch( ind )
{
case 0:
if( j > map_x.cols*0.25 && j < map_x.cols*0.75 && i > map_x.rows*0.25 && i < map_x.rows*0.75 )
{
map_x.at<float>(i, j) = 2*( j - map_x.cols*0.25f ) + 0.5f;
map_y.at<float>(i, j) = 2*( i - map_x.rows*0.25f ) + 0.5f;
}
else
{
map_x.at<float>(i, j) = 0;
map_y.at<float>(i, j) = 0;
}
break;
case 1:
map_x.at<float>(i, j) = (float)j;
map_y.at<float>(i, j) = (float)(map_x.rows - i);
break;
case 2:
map_x.at<float>(i, j) = (float)(map_x.cols - j);
map_y.at<float>(i, j) = (float)i;
break;
case 3:
map_x.at<float>(i, j) = (float)(map_x.cols - j);
map_y.at<float>(i, j) = (float)(map_x.rows - i);
break;
default:
break;
} // end of switch
}
}
ind++;
ind = (ind+1) % 4;
}
//! [Update]

View File

@ -0,0 +1,144 @@
import java.awt.BorderLayout;
import java.awt.Container;
import java.awt.Image;
import javax.swing.BoxLayout;
import javax.swing.ImageIcon;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JSlider;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.highgui.HighGui;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;
/**
 * OpenCV tutorial demo: basic thresholding with Imgproc.threshold, driven by
 * two Swing sliders (threshold type and threshold value).
 * The {@code //! [...]} markers delimit snippets extracted into the tutorial docs.
 */
public class Threshold {
    // Upper bound of the "Value" slider (maximum pixel intensity for 8-bit images).
    private static int MAX_VALUE = 255;
    // Highest threshold-type index exposed on the "Type" slider (0..4, see TRACKBAR_TYPE).
    private static int MAX_TYPE = 4;
    // maxval argument passed to Imgproc.threshold.
    private static int MAX_BINARY_VALUE = 255;
    private static final String WINDOW_NAME = "Threshold Demo";
    // HTML label so the slider legend renders with line breaks in Swing.
    private static final String TRACKBAR_TYPE = "<html><body>Type: <br> 0: Binary <br> "
            + "1: Binary Inverted <br> 2: Truncate <br> "
            + "3: To Zero <br> 4: To Zero Inverted</body></html>";
    private static final String TRACKBAR_VALUE = "Value";
    // Current slider state; read by update() on every slider change.
    private int thresholdValue = 0;
    private int thresholdType = 3;
    private Mat src;                    // loaded BGR image
    private Mat srcGray = new Mat();    // grayscale input to threshold()
    private Mat dst = new Mat();        // thresholded output shown in the window
    private JFrame frame;
    private JLabel imgLabel;            // label whose icon is refreshed with the result

    /**
     * Loads the input image (first CLI argument, or ../data/stuff.jpg by default),
     * converts it to grayscale and builds the GUI.
     *
     * @param args command-line arguments; args[0] optionally overrides the image path
     */
    public Threshold(String[] args) {
        //! [load]
        String imagePath = "../data/stuff.jpg";
        if (args.length > 0) {
            imagePath = args[0];
        }
        // Load an image
        src = Imgcodecs.imread(imagePath);
        if (src.empty()) {
            System.out.println("Empty image: " + imagePath);
            System.exit(0);
        }
        // Convert the image to Gray
        Imgproc.cvtColor(src, srcGray, Imgproc.COLOR_BGR2GRAY);
        //! [load]
        //! [window]
        // Create and set up the window.
        frame = new JFrame(WINDOW_NAME);
        frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        // Set up the content pane.
        Image img = HighGui.toBufferedImage(srcGray);
        addComponentsToPane(frame.getContentPane(), img);
        // Use the content pane's default BorderLayout. No need for
        // setLayout(new BorderLayout());
        // Display the window.
        frame.pack();
        frame.setVisible(true);
        //! [window]
    }

    /**
     * Builds the slider panel (threshold type + value) and the image label,
     * and wires the slider listeners that re-run the threshold on change.
     *
     * @param pane content pane of the frame; must use BorderLayout
     * @param img  initial image shown before any slider interaction
     */
    private void addComponentsToPane(Container pane, Image img) {
        if (!(pane.getLayout() instanceof BorderLayout)) {
            pane.add(new JLabel("Container doesn't use BorderLayout!"));
            return;
        }
        JPanel sliderPanel = new JPanel();
        sliderPanel.setLayout(new BoxLayout(sliderPanel, BoxLayout.PAGE_AXIS));
        //! [trackbar]
        sliderPanel.add(new JLabel(TRACKBAR_TYPE));
        // Create Trackbar to choose type of Threshold
        JSlider sliderThreshType = new JSlider(0, MAX_TYPE, thresholdType);
        sliderThreshType.setMajorTickSpacing(1);
        sliderThreshType.setMinorTickSpacing(1);
        sliderThreshType.setPaintTicks(true);
        sliderThreshType.setPaintLabels(true);
        sliderPanel.add(sliderThreshType);
        sliderPanel.add(new JLabel(TRACKBAR_VALUE));
        // Create Trackbar to choose Threshold value
        JSlider sliderThreshValue = new JSlider(0, MAX_VALUE, 0);
        sliderThreshValue.setMajorTickSpacing(50);
        sliderThreshValue.setMinorTickSpacing(10);
        sliderThreshValue.setPaintTicks(true);
        sliderThreshValue.setPaintLabels(true);
        sliderPanel.add(sliderThreshValue);
        //! [trackbar]
        //! [on_trackbar]
        // Each slider change updates the corresponding field and recomputes the result.
        sliderThreshType.addChangeListener(new ChangeListener() {
            @Override
            public void stateChanged(ChangeEvent e) {
                JSlider source = (JSlider) e.getSource();
                thresholdType = source.getValue();
                update();
            }
        });
        sliderThreshValue.addChangeListener(new ChangeListener() {
            @Override
            public void stateChanged(ChangeEvent e) {
                JSlider source = (JSlider) e.getSource();
                thresholdValue = source.getValue();
                update();
            }
        });
        //! [on_trackbar]
        pane.add(sliderPanel, BorderLayout.PAGE_START);
        imgLabel = new JLabel(new ImageIcon(img));
        pane.add(imgLabel, BorderLayout.CENTER);
    }

    //! [Threshold_Demo]
    // Re-runs the threshold with the current slider values and repaints the result.
    private void update() {
        Imgproc.threshold(srcGray, dst, thresholdValue, MAX_BINARY_VALUE, thresholdType);
        Image img = HighGui.toBufferedImage(dst);
        imgLabel.setIcon(new ImageIcon(img));
        frame.repaint();
    }
    //! [Threshold_Demo]

    public static void main(String[] args) {
        // Load the native OpenCV library
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        // Schedule a job for the event dispatch thread:
        // creating and showing this application's GUI.
        javax.swing.SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
                new Threshold(args);
            }
        });
    }
}

View File

@ -0,0 +1,259 @@
import java.awt.BorderLayout;
import java.awt.Container;
import java.awt.Image;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.util.List;
import javax.swing.BoxLayout;
import javax.swing.ImageIcon;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JSlider;
import javax.swing.SwingWorker;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;
import org.opencv.highgui.HighGui;
import org.opencv.imgproc.Imgproc;
import org.opencv.videoio.VideoCapture;
/**
 * OpenCV tutorial demo: object detection by HSV range thresholding with
 * Core.inRange. A SwingWorker grabs camera frames in the background while six
 * sliders (low/high H, S, V) define the in-range mask shown next to the feed.
 * The {@code //! [...]} markers delimit snippets extracted into the tutorial docs.
 */
public class ThresholdInRange {
    // Slider maxima: S and V range over 0..255; H over 0..180 (OpenCV uses H/2 for 8-bit).
    private static int MAX_VALUE = 255;
    private static int MAX_VALUE_H = 360/2;
    private static final String WINDOW_NAME = "Thresholding Operations using inRange demo";
    private static final String LOW_H_NAME = "Low H";
    private static final String LOW_S_NAME = "Low S";
    private static final String LOW_V_NAME = "Low V";
    private static final String HIGH_H_NAME = "High H";
    private static final String HIGH_S_NAME = "High S";
    private static final String HIGH_V_NAME = "High V";

    // One slider per HSV bound; read by CaptureTask.process() on every frame.
    private JSlider sliderLowH;
    private JSlider sliderHighH;
    private JSlider sliderLowS;
    private JSlider sliderHighS;
    private JSlider sliderLowV;
    private JSlider sliderHighV;

    private VideoCapture cap;
    private Mat matFrame = new Mat();   // first grabbed frame, used to size the GUI
    private JFrame frame;
    private JLabel imgCaptureLabel;     // left label: raw camera frame
    private JLabel imgDetectionLabel;   // right label: in-range mask
    private CaptureTask captureTask;    // background frame grabber; cancelled on window close

    /**
     * Opens the camera (index from args[0], default 0), grabs one frame to size
     * the window, builds the GUI and starts the background capture task.
     *
     * @param args command-line arguments; args[0] optionally selects the camera index
     */
    public ThresholdInRange(String[] args) {
        int cameraDevice = 0;
        if (args.length > 0) {
            cameraDevice = Integer.parseInt(args[0]);
        }
        //! [cap]
        cap = new VideoCapture(cameraDevice);
        //! [cap]
        if (!cap.isOpened()) {
            System.err.println("Cannot open camera: " + cameraDevice);
            System.exit(0);
        }
        if (!cap.read(matFrame)) {
            System.err.println("Cannot read camera stream.");
            System.exit(0);
        }
        //! [window]
        // Create and set up the window.
        frame = new JFrame(WINDOW_NAME);
        frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        frame.addWindowListener(new WindowAdapter() {
            @Override
            public void windowClosing(WindowEvent windowEvent) {
                // Stop the background grabber when the window closes.
                captureTask.cancel(true);
            }
        });
        // Set up the content pane.
        Image img = HighGui.toBufferedImage(matFrame);
        addComponentsToPane(frame.getContentPane(), img);
        // Use the content pane's default BorderLayout. No need for
        // setLayout(new BorderLayout());
        // Display the window.
        frame.pack();
        frame.setVisible(true);
        //! [window]
        captureTask = new CaptureTask();
        captureTask.execute();
    }

    //! [while]
    /**
     * Background worker: doInBackground() grabs frames off the EDT and publishes
     * clones; process() runs on the EDT, converts the latest frame to HSV,
     * applies the current slider bounds with Core.inRange and refreshes the GUI.
     */
    private class CaptureTask extends SwingWorker<Void, Mat> {
        @Override
        protected Void doInBackground() {
            Mat matFrame = new Mat();
            while (!isCancelled()) {
                if (!cap.read(matFrame)) {
                    break;
                }
                // Clone: matFrame is reused by the next read(); publish a snapshot.
                publish(matFrame.clone());
            }
            return null;
        }

        @Override
        protected void process(List<Mat> frames) {
            // Only the most recent published frame matters for display.
            Mat frame = frames.get(frames.size() - 1);
            Mat frameHSV = new Mat();
            Imgproc.cvtColor(frame, frameHSV, Imgproc.COLOR_BGR2HSV);
            Mat thresh = new Mat();
            Core.inRange(frameHSV, new Scalar(sliderLowH.getValue(), sliderLowS.getValue(), sliderLowV.getValue()),
                    new Scalar(sliderHighH.getValue(), sliderHighS.getValue(), sliderHighV.getValue()), thresh);
            update(frame, thresh);
        }
    }
    //! [while]

    /**
     * Builds the six HSV sliders and the two image labels, and wires listeners
     * that keep each low bound strictly below its matching high bound.
     *
     * @param pane content pane of the frame; must use BorderLayout
     * @param img  initial image used for both labels
     */
    private void addComponentsToPane(Container pane, Image img) {
        if (!(pane.getLayout() instanceof BorderLayout)) {
            pane.add(new JLabel("Container doesn't use BorderLayout!"));
            return;
        }
        JPanel sliderPanel = new JPanel();
        sliderPanel.setLayout(new BoxLayout(sliderPanel, BoxLayout.PAGE_AXIS));
        //! [trackbar]
        sliderPanel.add(new JLabel(LOW_H_NAME));
        sliderLowH = new JSlider(0, MAX_VALUE_H, 0);
        sliderLowH.setMajorTickSpacing(50);
        sliderLowH.setMinorTickSpacing(10);
        sliderLowH.setPaintTicks(true);
        sliderLowH.setPaintLabels(true);
        sliderPanel.add(sliderLowH);
        sliderPanel.add(new JLabel(HIGH_H_NAME));
        sliderHighH = new JSlider(0, MAX_VALUE_H, MAX_VALUE_H);
        sliderHighH.setMajorTickSpacing(50);
        sliderHighH.setMinorTickSpacing(10);
        sliderHighH.setPaintTicks(true);
        sliderHighH.setPaintLabels(true);
        sliderPanel.add(sliderHighH);
        sliderPanel.add(new JLabel(LOW_S_NAME));
        sliderLowS = new JSlider(0, MAX_VALUE, 0);
        sliderLowS.setMajorTickSpacing(50);
        sliderLowS.setMinorTickSpacing(10);
        sliderLowS.setPaintTicks(true);
        sliderLowS.setPaintLabels(true);
        sliderPanel.add(sliderLowS);
        sliderPanel.add(new JLabel(HIGH_S_NAME));
        sliderHighS = new JSlider(0, MAX_VALUE, MAX_VALUE);
        sliderHighS.setMajorTickSpacing(50);
        sliderHighS.setMinorTickSpacing(10);
        sliderHighS.setPaintTicks(true);
        sliderHighS.setPaintLabels(true);
        sliderPanel.add(sliderHighS);
        sliderPanel.add(new JLabel(LOW_V_NAME));
        sliderLowV = new JSlider(0, MAX_VALUE, 0);
        sliderLowV.setMajorTickSpacing(50);
        sliderLowV.setMinorTickSpacing(10);
        sliderLowV.setPaintTicks(true);
        sliderLowV.setPaintLabels(true);
        sliderPanel.add(sliderLowV);
        sliderPanel.add(new JLabel(HIGH_V_NAME));
        sliderHighV = new JSlider(0, MAX_VALUE, MAX_VALUE);
        sliderHighV.setMajorTickSpacing(50);
        sliderHighV.setMinorTickSpacing(10);
        sliderHighV.setPaintTicks(true);
        sliderHighV.setPaintLabels(true);
        sliderPanel.add(sliderHighV);
        //! [trackbar]
        //! [low]
        // Clamp each low bound to stay at least 1 below its high bound.
        sliderLowH.addChangeListener(new ChangeListener() {
            @Override
            public void stateChanged(ChangeEvent e) {
                JSlider source = (JSlider) e.getSource();
                int valH = Math.min(sliderHighH.getValue()-1, source.getValue());
                sliderLowH.setValue(valH);
            }
        });
        //! [low]
        //! [high]
        // Clamp each high bound to stay at least 1 above its low bound.
        sliderHighH.addChangeListener(new ChangeListener() {
            @Override
            public void stateChanged(ChangeEvent e) {
                JSlider source = (JSlider) e.getSource();
                int valH = Math.max(source.getValue(), sliderLowH.getValue()+1);
                sliderHighH.setValue(valH);
            }
        });
        //! [high]
        sliderLowS.addChangeListener(new ChangeListener() {
            @Override
            public void stateChanged(ChangeEvent e) {
                JSlider source = (JSlider) e.getSource();
                int valS = Math.min(sliderHighS.getValue()-1, source.getValue());
                sliderLowS.setValue(valS);
            }
        });
        sliderHighS.addChangeListener(new ChangeListener() {
            @Override
            public void stateChanged(ChangeEvent e) {
                JSlider source = (JSlider) e.getSource();
                int valS = Math.max(source.getValue(), sliderLowS.getValue()+1);
                sliderHighS.setValue(valS);
            }
        });
        sliderLowV.addChangeListener(new ChangeListener() {
            @Override
            public void stateChanged(ChangeEvent e) {
                JSlider source = (JSlider) e.getSource();
                int valV = Math.min(sliderHighV.getValue()-1, source.getValue());
                sliderLowV.setValue(valV);
            }
        });
        sliderHighV.addChangeListener(new ChangeListener() {
            @Override
            public void stateChanged(ChangeEvent e) {
                JSlider source = (JSlider) e.getSource();
                int valV = Math.max(source.getValue(), sliderLowV.getValue()+1);
                sliderHighV.setValue(valV);
            }
        });
        pane.add(sliderPanel, BorderLayout.PAGE_START);
        JPanel framePanel = new JPanel();
        imgCaptureLabel = new JLabel(new ImageIcon(img));
        framePanel.add(imgCaptureLabel);
        imgDetectionLabel = new JLabel(new ImageIcon(img));
        framePanel.add(imgDetectionLabel);
        pane.add(framePanel, BorderLayout.CENTER);
    }

    /**
     * Refreshes both image labels with the latest capture and its mask.
     *
     * @param imgCapture raw camera frame
     * @param imgThresh  binary mask produced by Core.inRange
     */
    private void update(Mat imgCapture, Mat imgThresh) {
        //! [show]
        imgCaptureLabel.setIcon(new ImageIcon(HighGui.toBufferedImage(imgCapture)));
        imgDetectionLabel.setIcon(new ImageIcon(HighGui.toBufferedImage(imgThresh)));
        frame.repaint();
        //! [show]
    }

    public static void main(String[] args) {
        // Load the native OpenCV library
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        // Schedule a job for the event dispatch thread:
        // creating and showing this application's GUI.
        javax.swing.SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
                new ThresholdInRange(args);
            }
        });
    }
}

View File

@ -0,0 +1,110 @@
import java.awt.BorderLayout;
import java.awt.Container;
import java.awt.Image;
import javax.swing.BoxLayout;
import javax.swing.ImageIcon;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JSlider;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.highgui.HighGui;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;
/**
 * OpenCV tutorial demo: Canny edge detection. A single slider sets the low
 * threshold; the high threshold is derived as low * RATIO. The edge map is
 * used as a copy mask so that only edge pixels of the source are displayed.
 */
public class CannyDetectorDemo {
    // Upper bound for the low-threshold slider.
    private static final int MAX_LOW_THRESHOLD = 100;
    // High Canny threshold = lowThresh * RATIO (Canny's recommended 2:1..3:1 ratio).
    private static final int RATIO = 3;
    // Sobel aperture size passed to Imgproc.Canny.
    private static final int KERNEL_SIZE = 3;
    // Pre-blur kernel used to reduce noise before edge detection.
    private static final Size BLUR_SIZE = new Size(3,3);

    private int lowThresh = 0;              // current slider value
    private Mat src;                        // loaded input image
    private Mat srcBlur = new Mat();        // blurred copy fed to Canny
    private Mat detectedEdges = new Mat();  // binary edge map (also the copy mask)
    private Mat dst = new Mat();            // masked output shown in the window
    private JFrame frame;
    private JLabel imgLabel;

    /**
     * Loads the input image (args[0], or ../data/fruits.jpg by default) and
     * builds the GUI.
     *
     * @param args command-line arguments; args[0] optionally overrides the image path
     */
    public CannyDetectorDemo(String[] args) {
        String imagePath = args.length > 0 ? args[0] : "../data/fruits.jpg";
        src = Imgcodecs.imread(imagePath);
        if (src.empty()) {
            System.out.println("Empty image: " + imagePath);
            System.exit(0);
        }
        // Create and set up the window.
        frame = new JFrame("Edge Map (Canny detector demo)");
        frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        // Set up the content pane.
        Image img = HighGui.toBufferedImage(src);
        addComponentsToPane(frame.getContentPane(), img);
        // Use the content pane's default BorderLayout. No need for
        // setLayout(new BorderLayout());
        // Display the window.
        frame.pack();
        frame.setVisible(true);
    }

    /**
     * Builds the threshold slider and the image label, and wires the listener
     * that re-runs the detector on change.
     *
     * @param pane content pane of the frame; must use BorderLayout
     * @param img  initial image shown before any slider interaction
     */
    private void addComponentsToPane(Container pane, Image img) {
        if (!(pane.getLayout() instanceof BorderLayout)) {
            pane.add(new JLabel("Container doesn't use BorderLayout!"));
            return;
        }
        JPanel sliderPanel = new JPanel();
        sliderPanel.setLayout(new BoxLayout(sliderPanel, BoxLayout.PAGE_AXIS));
        sliderPanel.add(new JLabel("Min Threshold:"));
        JSlider slider = new JSlider(0, MAX_LOW_THRESHOLD, 0);
        slider.setMajorTickSpacing(10);
        slider.setMinorTickSpacing(5);
        slider.setPaintTicks(true);
        slider.setPaintLabels(true);
        slider.addChangeListener(new ChangeListener() {
            @Override
            public void stateChanged(ChangeEvent e) {
                JSlider source = (JSlider) e.getSource();
                lowThresh = source.getValue();
                update();
            }
        });
        sliderPanel.add(slider);
        pane.add(sliderPanel, BorderLayout.PAGE_START);
        imgLabel = new JLabel(new ImageIcon(img));
        pane.add(imgLabel, BorderLayout.CENTER);
    }

    // Blur, run Canny, copy source pixels through the edge mask, repaint.
    private void update() {
        Imgproc.blur(src, srcBlur, BLUR_SIZE);
        Imgproc.Canny(srcBlur, detectedEdges, lowThresh, lowThresh * RATIO, KERNEL_SIZE, false);
        // Black canvas; copyTo with the edge map as mask keeps only edge pixels.
        dst = new Mat(src.size(), CvType.CV_8UC3, Scalar.all(0));
        src.copyTo(dst, detectedEdges);
        Image img = HighGui.toBufferedImage(dst);
        imgLabel.setIcon(new ImageIcon(img));
        frame.repaint();
    }

    public static void main(String[] args) {
        // Load the native OpenCV library
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        // Schedule a job for the event dispatch thread:
        // creating and showing this application's GUI.
        javax.swing.SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
                new CannyDetectorDemo(args);
            }
        });
    }
}

View File

@ -0,0 +1,98 @@
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.highgui.HighGui;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;
/**
 * OpenCV tutorial demo: Imgproc.remap with four cyclic mapping patterns
 * (center zoom, vertical flip, horizontal flip, both flips). updateMap()
 * rewrites the mapX/mapY lookup tables in place and advances the pattern
 * index each second. The {@code //! [...]} markers delimit tutorial snippets.
 */
class Remap {
    private Mat mapX = new Mat();   // per-pixel source x-coordinates for remap
    private Mat mapY = new Mat();   // per-pixel source y-coordinates for remap
    private Mat dst = new Mat();    // remapped output
    private int ind = 0;            // current pattern index, cycles 0..3

    //! [Update]
    /**
     * Fills mapX/mapY with the pattern selected by {@code ind}, then advances
     * {@code ind} to the next pattern (mod 4). Works on flat Java float buffers
     * because per-element Mat access from Java is slow.
     */
    private void updateMap() {
        float buffX[] = new float[(int) (mapX.total() * mapX.channels())];
        mapX.get(0, 0, buffX);
        float buffY[] = new float[(int) (mapY.total() * mapY.channels())];
        mapY.get(0, 0, buffY);
        for (int i = 0; i < mapX.rows(); i++) {
            for (int j = 0; j < mapX.cols(); j++) {
                switch (ind) {
                    case 0:
                        // Zoom: map the central half of the image to the full frame, zero elsewhere.
                        if( j > mapX.cols()*0.25 && j < mapX.cols()*0.75 && i > mapX.rows()*0.25 && i < mapX.rows()*0.75 ) {
                            buffX[i*mapX.cols() + j] = 2*( j - mapX.cols()*0.25f ) + 0.5f;
                            buffY[i*mapY.cols() + j] = 2*( i - mapX.rows()*0.25f ) + 0.5f;
                        } else {
                            buffX[i*mapX.cols() + j] = 0;
                            buffY[i*mapY.cols() + j] = 0;
                        }
                        break;
                    case 1:
                        // Flip vertically (upside down).
                        buffX[i*mapX.cols() + j] = j;
                        buffY[i*mapY.cols() + j] = mapY.rows() - i;
                        break;
                    case 2:
                        // Flip horizontally (mirror).
                        buffX[i*mapX.cols() + j] = mapY.cols() - j;
                        buffY[i*mapY.cols() + j] = i;
                        break;
                    case 3:
                        // Flip both axes.
                        buffX[i*mapX.cols() + j] = mapY.cols() - j;
                        buffY[i*mapY.cols() + j] = mapY.rows() - i;
                        break;
                    default:
                        break;
                }
            }
        }
        mapX.put(0, 0, buffX);
        mapY.put(0, 0, buffY);
        ind = (ind+1) % 4;
    }
    //! [Update]

    /**
     * Loads the input image (args[0], or ../data/chicky_512.png by default)
     * and loops: update maps, remap, show; ESC exits.
     *
     * @param args command-line arguments; args[0] optionally overrides the image path
     */
    public void run(String[] args) {
        String filename = args.length > 0 ? args[0] : "../data/chicky_512.png";
        //! [Load]
        Mat src = Imgcodecs.imread(filename, Imgcodecs.IMREAD_COLOR);
        if (src.empty()) {
            System.err.println("Cannot read image: " + filename);
            System.exit(0);
        }
        //! [Load]
        //! [Create]
        mapX = new Mat(src.size(), CvType.CV_32F);
        mapY = new Mat(src.size(), CvType.CV_32F);
        //! [Create]
        //! [Window]
        final String winname = "Remap demo";
        HighGui.namedWindow(winname, HighGui.WINDOW_AUTOSIZE);
        //! [Window]
        //! [Loop]
        for (;;) {
            updateMap();
            Imgproc.remap(src, dst, mapX, mapY, Imgproc.INTER_LINEAR);
            HighGui.imshow(winname, dst);
            // Switch pattern every second; 27 == ESC quits.
            if (HighGui.waitKey(1000) == 27) {
                break;
            }
        }
        //! [Loop]
        System.exit(0);
    }
}
/**
 * Entry point for the Remap tutorial demo: loads the native OpenCV library
 * and delegates to {@link Remap#run}.
 */
public class RemapDemo {
    public static void main(String[] args) {
        // Load the native OpenCV library (must happen before any OpenCV call).
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        new Remap().run(args);
    }
}

View File

@ -0,0 +1,34 @@
from __future__ import print_function
import cv2 as cv
import argparse
# Tutorial parameters for the Canny demo.
max_lowThreshold = 100      # upper bound of the low-threshold trackbar
window_name = 'Edge Map'
title_trackbar = 'Min Threshold:'
ratio = 3                   # high Canny threshold = low * ratio
kernel_size = 3             # Sobel aperture size for cv.Canny
def CannyThreshold(val):
    """Trackbar callback: run Canny with low threshold `val` and show the
    source image masked to its detected edges."""
    threshold = val
    blurred = cv.blur(src_gray, (3, 3))
    edges = cv.Canny(blurred, threshold, threshold * ratio, kernel_size)
    # Keep only source pixels where an edge was detected.
    edge_mask = (edges != 0)
    result = src * edge_mask[:, :, None].astype(src.dtype)
    cv.imshow(window_name, result)
# Parse CLI arguments: --input selects the image (defaults to the sample fruits image).
parser = argparse.ArgumentParser(description='Code for Canny Edge Detector tutorial.')
parser.add_argument('--input', help='Path to input image.', default='../data/fruits.jpg')
args = parser.parse_args()
src = cv.imread(args.input)
if src is None:
    print('Could not open or find the image: ', args.input)
    exit(0)
# Canny operates on a single-channel image.
src_gray = cv.cvtColor(src, cv.COLOR_BGR2GRAY)
cv.namedWindow(window_name)
# Trackbar drives CannyThreshold on every change; start at 0.
cv.createTrackbar(title_trackbar, window_name , 0, max_lowThreshold, CannyThreshold)
CannyThreshold(0)
cv.waitKey()

View File

@ -0,0 +1,65 @@
from __future__ import print_function
import cv2 as cv
import numpy as np
import argparse
## [Update]
def update_map(ind, map_x, map_y):
    """Fill map_x/map_y in place with one of four remap patterns.

    ind   -- pattern selector: 0 zooms the central half of the image,
             1 flips vertically, 2 flips horizontally, 3 flips both.
             Any other value leaves the maps untouched (as before).
    map_x -- float32 array of x source coordinates, modified in place.
    map_y -- float32 array of y source coordinates, modified in place.

    Vectorized with numpy instead of the original per-pixel Python
    loops; the stored values are identical.
    """
    rows, cols = map_x.shape
    if ind == 0:
        # Coordinate grids: jj[i, j] == j, ii[i, j] == i.
        jj, ii = np.meshgrid(np.arange(cols), np.arange(rows))
        # Strictly inside the central half of the image in both axes.
        inside = ((jj > cols * 0.25) & (jj < cols * 0.75) &
                  (ii > rows * 0.25) & (ii < rows * 0.75))
        map_x[:] = np.where(inside, 2 * (jj - cols * 0.25) + 0.5, 0)
        map_y[:] = np.where(inside, 2 * (ii - rows * 0.25) + 0.5, 0)
    elif ind == 1:
        map_x[:] = np.arange(cols)                    # identity columns
        map_y[:] = (rows - np.arange(rows))[:, None]  # upside down
    elif ind == 2:
        map_x[:] = cols - np.arange(cols)             # mirrored columns
        map_y[:] = np.arange(rows)[:, None]           # identity rows
    elif ind == 3:
        map_x[:] = cols - np.arange(cols)             # mirrored columns
        map_y[:] = (rows - np.arange(rows))[:, None]  # upside down
## [Update]
# Command-line interface: one optional input-image path.
parser = argparse.ArgumentParser(description='Code for Remapping tutorial.')
parser.add_argument('--input', help='Path to input image.', default='../data/chicky_512.png')
args = parser.parse_args()
## [Load]
src = cv.imread(args.input, cv.IMREAD_COLOR)
if src is None:
    print('Could not open or find the image: ', args.input)
    exit(0)
## [Load]
## [Create]
# Coordinate maps for cv.remap: same height/width as the image, float32.
map_x = np.zeros((src.shape[0], src.shape[1]), dtype=np.float32)
map_y = np.zeros((src.shape[0], src.shape[1]), dtype=np.float32)
## [Create]
## [Window]
window_name = 'Remap demo'
cv.namedWindow(window_name)
## [Window]
## [Loop]
# Cycle through the four remap patterns, one per second, until ESC.
ind = 0
while True:
    update_map(ind, map_x, map_y)
    ind = (ind + 1) % 4
    dst = cv.remap(src, map_x, map_y, cv.INTER_LINEAR)
    cv.imshow(window_name, dst)
    # waitKey(1000) also services GUI events; 27 is ESC.
    c = cv.waitKey(1000)
    if c == 27:
        break
## [Loop]

View File

@ -0,0 +1,54 @@
from __future__ import print_function
import cv2 as cv
import argparse
# Tutorial configuration (module-level globals read by Threshold_Demo).
max_value = 255         # upper bound of the threshold-value trackbar
max_type = 4            # highest threshold-mode index (see trackbar label below)
max_binary_value = 255  # value written by the binary threshold modes
trackbar_type = 'Type: \n 0: Binary \n 1: Binary Inverted \n 2: Truncate \n 3: To Zero \n 4: To Zero Inverted'
trackbar_value = 'Value'
window_name = 'Threshold Demo'
## [Threshold_Demo]
def Threshold_Demo(val):
    """Trackbar callback: threshold the grayscale image with the mode and
    value currently selected on the trackbars, then display the result.

    Modes: 0 binary, 1 binary inverted, 2 truncate,
    3 to-zero, 4 to-zero inverted.
    Reads globals src_gray, window_name, trackbar_type, trackbar_value,
    max_binary_value.
    """
    mode = cv.getTrackbarPos(trackbar_type, window_name)
    level = cv.getTrackbarPos(trackbar_value, window_name)
    _, thresholded = cv.threshold(src_gray, level, max_binary_value, mode)
    cv.imshow(window_name, thresholded)
## [Threshold_Demo]
# Command-line interface: one optional input-image path.
parser = argparse.ArgumentParser(description='Code for Basic Thresholding Operations tutorial.')
parser.add_argument('--input', help='Path to input image.', default='../data/stuff.jpg')
args = parser.parse_args()
## [load]
# Load an image
src = cv.imread(args.input)
if src is None:
    print('Could not open or find the image: ', args.input)
    exit(0)
# Convert the image to Gray (stray C-style ';' removed)
src_gray = cv.cvtColor(src, cv.COLOR_BGR2GRAY)
## [load]
## [window]
# Create a window to display results
cv.namedWindow(window_name)
## [window]
## [trackbar]
# Create Trackbar to choose type of Threshold (initial position 3: To Zero)
cv.createTrackbar(trackbar_type, window_name , 3, max_type, Threshold_Demo)
# Create Trackbar to choose Threshold value
cv.createTrackbar(trackbar_value, window_name , 0, max_value, Threshold_Demo)
## [trackbar]
# Call the function to initialize the display before any interaction
Threshold_Demo(0)
# Wait until user finishes program
cv.waitKey()

View File

@ -0,0 +1,107 @@
from __future__ import print_function
import cv2 as cv
import argparse
# Tutorial configuration (module-level globals shared with the callbacks).
max_value = 255      # S and V channels span [0, 255] for 8-bit images
max_value_H = 360//2 # OpenCV stores hue halved, i.e. in [0, 179], for 8-bit images
# Current HSV bounds, adjusted by the trackbar callbacks below.
low_H = 0
low_S = 0
low_V = 0
high_H = max_value_H
high_S = max_value
high_V = max_value
window_capture_name = 'Video Capture'
window_detection_name = 'Object Detection'
# Trackbar labels.
low_H_name = 'Low H'
low_S_name = 'Low S'
low_V_name = 'Low V'
high_H_name = 'High H'
high_S_name = 'High S'
high_V_name = 'High V'
## [low]
def on_low_H_thresh_trackbar(val):
    # Keep the low hue bound strictly below the high one and sync the slider.
    global low_H, high_H
    low_H = min(val, high_H - 1)
    cv.setTrackbarPos(low_H_name, window_detection_name, low_H)
## [low]
## [high]
def on_high_H_thresh_trackbar(val):
    # Keep the high hue bound strictly above the low one and sync the slider.
    global low_H, high_H
    high_H = max(val, low_H + 1)
    cv.setTrackbarPos(high_H_name, window_detection_name, high_H)
## [high]
def on_low_S_thresh_trackbar(val):
    # Keep the low saturation bound strictly below the high one and sync the slider.
    global low_S, high_S
    low_S = min(val, high_S - 1)
    cv.setTrackbarPos(low_S_name, window_detection_name, low_S)
def on_high_S_thresh_trackbar(val):
    # Keep the high saturation bound strictly above the low one and sync the slider.
    global low_S, high_S
    high_S = max(val, low_S + 1)
    cv.setTrackbarPos(high_S_name, window_detection_name, high_S)
def on_low_V_thresh_trackbar(val):
    # Keep the low value bound strictly below the high one and sync the slider.
    global low_V, high_V
    low_V = min(val, high_V - 1)
    cv.setTrackbarPos(low_V_name, window_detection_name, low_V)
def on_high_V_thresh_trackbar(val):
    # Keep the high value bound strictly above the low one and sync the slider.
    global low_V, high_V
    high_V = max(val, low_V + 1)
    cv.setTrackbarPos(high_V_name, window_detection_name, high_V)
# Command-line interface: an optional camera index.
parser = argparse.ArgumentParser(description='Code for Thresholding Operations using inRange tutorial.')
# Typo fixed in user-facing help text: "devide" -> "device".
parser.add_argument('--camera', help='Camera device number.', default=0, type=int)
args = parser.parse_args()
## [cap]
cap = cv.VideoCapture(args.camera)
## [cap]
## [window]
cv.namedWindow(window_capture_name)
cv.namedWindow(window_detection_name)
## [window]
## [trackbar]
# Each trackbar drives one bound of the HSV in-range filter via its callback.
cv.createTrackbar(low_H_name, window_detection_name , low_H, max_value_H, on_low_H_thresh_trackbar)
cv.createTrackbar(high_H_name, window_detection_name , high_H, max_value_H, on_high_H_thresh_trackbar)
cv.createTrackbar(low_S_name, window_detection_name , low_S, max_value, on_low_S_thresh_trackbar)
cv.createTrackbar(high_S_name, window_detection_name , high_S, max_value, on_high_S_thresh_trackbar)
cv.createTrackbar(low_V_name, window_detection_name , low_V, max_value, on_low_V_thresh_trackbar)
cv.createTrackbar(high_V_name, window_detection_name , high_V, max_value, on_high_V_thresh_trackbar)
## [trackbar]
while True:
    ## [while]
    ret, frame = cap.read()
    if frame is None:
        # End of stream / no camera frame available.
        break
    # Filter in HSV space using the bounds set on the trackbars.
    frame_HSV = cv.cvtColor(frame, cv.COLOR_BGR2HSV)
    frame_threshold = cv.inRange(frame_HSV, (low_H, low_S, low_V), (high_H, high_S, high_V))
    ## [while]
    ## [show]
    cv.imshow(window_capture_name, frame)
    cv.imshow(window_detection_name, frame_threshold)
    ## [show]
    # 'q' or ESC quits.
    key = cv.waitKey(30)
    if key == ord('q') or key == 27:
        break