Merge remote-tracking branch 'upstream/3.4' into merge-3.4

commit f663e8f903
doc: Python setup tutorial (Ubuntu):

````diff
@@ -62,8 +62,8 @@ We need **CMake** to configure the installation, **GCC** for compilation, **Pyth
 ```
 sudo apt-get install cmake
-sudo apt-get install python-devel numpy
-sudo apt-get install gcc gcc-c++
+sudo apt-get install python-dev python-numpy
+sudo apt-get install gcc g++
 ```
 
 Next we need **GTK** support for GUI features, Camera support (v4l), Media Support
````

The old commands used Fedora-style package names; `python-dev`, `python-numpy`, and `g++` are the Debian/Ubuntu equivalents.
modules/dnn/src/onnx/onnx_importer.cpp:

```diff
@@ -546,6 +546,43 @@ void ONNXImporter::populateNet(Net dstNet)
         {
             replaceLayerParam(layerParams, "size", "local_size");
         }
+        else if (layer_type == "InstanceNormalization")
+        {
+            if (node_proto.input_size() != 3)
+                CV_Error(Error::StsNotImplemented,
+                         "Expected input, scale, bias");
+
+            layerParams.blobs.resize(4);
+            layerParams.blobs[2] = getBlob(node_proto, constBlobs, 1);  // weightData
+            layerParams.blobs[3] = getBlob(node_proto, constBlobs, 2);  // biasData
+            layerParams.set("has_bias", true);
+            layerParams.set("has_weight", true);
+
+            // Get number of channels in input
+            int size = layerParams.blobs[2].total();
+            layerParams.blobs[0] = Mat::zeros(size, 1, CV_32F); // mean
+            layerParams.blobs[1] = Mat::ones(size, 1, CV_32F);  // std
+
+            LayerParams mvnParams;
+            mvnParams.name = layerParams.name + "/MVN";
+            mvnParams.type = "MVN";
+            mvnParams.set("eps", layerParams.get<float>("epsilon"));
+            layerParams.erase("epsilon");
+
+            // Create MVN layer
+            int id = dstNet.addLayer(mvnParams.name, mvnParams.type, mvnParams);
+            // Connect to input
+            layerId = layer_id.find(node_proto.input(0));
+            CV_Assert(layerId != layer_id.end());
+            dstNet.connect(layerId->second.layerId, layerId->second.outputId, id, 0);
+            // Add shape
+            layer_id.insert(std::make_pair(mvnParams.name, LayerInfo(id, 0)));
+            outShapes[mvnParams.name] = outShapes[node_proto.input(0)];
+
+            // Replace Batch Norm's input with MVN
+            node_proto.set_input(0, mvnParams.name);
+            layerParams.type = "BatchNorm";
+        }
         else if (layer_type == "BatchNormalization")
         {
             if (node_proto.input_size() != 5)
```
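The importer rewrites InstanceNormalization as an MVN layer feeding a BatchNorm layer: MVN computes the per-instance normalization (x − μ)/√(σ² + ε), while the BatchNorm, given zero-mean and unit-variance blobs, applies only the remaining affine step scale·x + bias. A minimal usage sketch of what this enables; the file name `instancenorm.onnx` and the input shape are placeholders, not part of the commit:

```cpp
#include <opencv2/dnn.hpp>
#include <opencv2/core.hpp>

int main()
{
    // Hypothetical model containing an InstanceNormalization node.
    cv::dnn::Net net = cv::dnn::readNetFromONNX("instancenorm.onnx");

    // Dummy 224x224 3-channel image, converted to an NCHW blob.
    cv::Mat img(224, 224, CV_32FC3, cv::Scalar::all(0.5));
    net.setInput(cv::dnn::blobFromImage(img));

    cv::Mat out = net.forward();  // the node runs as MVN + BatchNorm internally
    return 0;
}
```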
modules/dnn/test/test_onnx_importer.cpp:

```diff
@@ -76,6 +76,14 @@ public:
     }
 };
 
+TEST_P(Test_ONNX_layers, InstanceNorm)
+{
+    if (target == DNN_TARGET_MYRIAD)
+        testONNXModels("instancenorm", npy, 0, 0, false, false);
+    else
+        testONNXModels("instancenorm", npy);
+}
+
 TEST_P(Test_ONNX_layers, MaxPooling)
 {
     testONNXModels("maxpooling");
```
modules/imgcodecs/src/loadsave.cpp:

```diff
@@ -723,21 +723,25 @@ bool imwrite( const String& filename, InputArray _img,
 static bool
 imdecode_( const Mat& buf, int flags, Mat& mat )
 {
-    CV_Assert(!buf.empty() && buf.isContinuous());
+    CV_Assert(!buf.empty());
+    CV_Assert(buf.isContinuous());
+    CV_Assert(buf.checkVector(1, CV_8U) > 0);
+    Mat buf_row = buf.reshape(1, 1);  // decoders expect a single row; avoids issues with vector columns
 
     String filename;
 
-    ImageDecoder decoder = findDecoder(buf);
+    ImageDecoder decoder = findDecoder(buf_row);
     if( !decoder )
         return 0;
 
-    if( !decoder->setSource(buf) )
+    if( !decoder->setSource(buf_row) )
     {
         filename = tempfile();
         FILE* f = fopen( filename.c_str(), "wb" );
         if( !f )
             return 0;
-        size_t bufSize = buf.cols*buf.rows*buf.elemSize();
-        if( fwrite( buf.ptr(), 1, bufSize, f ) != bufSize )
+        size_t bufSize = buf_row.total()*buf.elemSize();
+        if (fwrite(buf_row.ptr(), 1, bufSize, f) != bufSize)
         {
             fclose( f );
             CV_Error( Error::StsError, "failed to write image data to temporary file" );
```
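Reshaping to a single row means `imdecode` now also handles encoded buffers laid out as an N×1 column vector, as long as the data is continuous `CV_8U`. A small sketch, assuming a placeholder `image.png` exists on disk:

```cpp
#include <opencv2/imgcodecs.hpp>
#include <fstream>
#include <iterator>
#include <vector>

int main()
{
    // Read an encoded file into memory ("image.png" is a placeholder path).
    std::ifstream f("image.png", std::ios::binary);
    std::vector<uchar> data((std::istreambuf_iterator<char>(f)),
                            std::istreambuf_iterator<char>());

    // An N x 1 column buffer: previously problematic, now reshaped to a
    // single row inside imdecode_ before the decoder sees it.
    cv::Mat colBuf(static_cast<int>(data.size()), 1, CV_8U, data.data());
    cv::Mat img = cv::imdecode(colBuf, cv::IMREAD_COLOR);
    return img.empty() ? 1 : 0;
}
```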
modules/imgproc/test/ocl/test_color.cpp:

```diff
@@ -294,14 +294,14 @@ OCL_TEST_P(CvtColor8u, GRAY2BGR555) { performTest(1, 2, CVTCODE(GRAY2BGR555)); }
 
 // RGBA <-> mRGBA
 
-#ifdef HAVE_IPP
+#if defined(HAVE_IPP) || defined(__arm__)
 #define IPP_EPS depth <= CV_32S ? 1 : 1e-3
 #else
 #define IPP_EPS 1e-3
 #endif
 
 OCL_TEST_P(CvtColor8u, RGBA2mRGBA) { performTest(4, 4, CVTCODE(RGBA2mRGBA), IPP_EPS); }
-OCL_TEST_P(CvtColor8u, mRGBA2RGBA) { performTest(4, 4, CVTCODE(mRGBA2RGBA)); }
+OCL_TEST_P(CvtColor8u, mRGBA2RGBA) { performTest(4, 4, CVTCODE(mRGBA2RGBA), IPP_EPS); }
 
 // RGB <-> Lab
 
```
org/opencv/android/JavaCamera2View.java:

```diff
@@ -2,6 +2,7 @@ package org.opencv.android;
 
 import java.nio.ByteBuffer;
 import java.util.Arrays;
+import java.util.List;
 
 import android.annotation.TargetApi;
 import android.content.Context;
@@ -24,6 +25,7 @@ import android.view.ViewGroup.LayoutParams;
 
 import org.opencv.core.CvType;
 import org.opencv.core.Mat;
+import org.opencv.core.Size;
 import org.opencv.imgproc.Imgproc;
 
 /**
```
```diff
@@ -248,6 +250,20 @@ public class JavaCamera2View extends CameraBridgeViewBase {
         }
     }
 
+    public static class JavaCameraSizeAccessor implements ListItemAccessor {
+        @Override
+        public int getWidth(Object obj) {
+            android.util.Size size = (android.util.Size)obj;
+            return size.getWidth();
+        }
+
+        @Override
+        public int getHeight(Object obj) {
+            android.util.Size size = (android.util.Size)obj;
+            return size.getHeight();
+        }
+    }
+
     boolean calcPreviewSize(final int width, final int height) {
         Log.i(LOGTAG, "calcPreviewSize: " + width + "x" + height);
         if (mCameraID == null) {
```
```diff
@@ -258,26 +274,15 @@ public class JavaCamera2View extends CameraBridgeViewBase {
         try {
             CameraCharacteristics characteristics = manager.getCameraCharacteristics(mCameraID);
             StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
-            int bestWidth = 0, bestHeight = 0;
-            float aspect = (float) width / height;
             android.util.Size[] sizes = map.getOutputSizes(ImageReader.class);
-            bestWidth = sizes[0].getWidth();
-            bestHeight = sizes[0].getHeight();
-            for (android.util.Size sz : sizes) {
-                int w = sz.getWidth(), h = sz.getHeight();
-                Log.d(LOGTAG, "trying size: " + w + "x" + h);
-                if (width >= w && height >= h && bestWidth <= w && bestHeight <= h
-                        && Math.abs(aspect - (float) w / h) < 0.2) {
-                    bestWidth = w;
-                    bestHeight = h;
-                }
-            }
-            Log.i(LOGTAG, "best size: " + bestWidth + "x" + bestHeight);
-            assert(!(bestWidth == 0 || bestHeight == 0));
-            if (mPreviewSize.getWidth() == bestWidth && mPreviewSize.getHeight() == bestHeight)
+            List<android.util.Size> sizes_list = Arrays.asList(sizes);
+            Size frameSize = calculateCameraFrameSize(sizes_list, new JavaCameraSizeAccessor(), width, height);
+            Log.i(LOGTAG, "Selected preview size to " + Integer.valueOf((int)frameSize.width) + "x" + Integer.valueOf((int)frameSize.height));
+            assert(!(frameSize.width == 0 || frameSize.height == 0));
+            if (mPreviewSize.getWidth() == frameSize.width && mPreviewSize.getHeight() == frameSize.height)
                 return false;
             else {
-                mPreviewSize = new android.util.Size(bestWidth, bestHeight);
+                mPreviewSize = new android.util.Size((int)frameSize.width, (int)frameSize.height);
                 return true;
             }
         } catch (CameraAccessException e) {
```
org/opencv/android/CameraBridgeViewBase.java:

```diff
@@ -30,7 +30,7 @@ import android.view.SurfaceView;
 public abstract class CameraBridgeViewBase extends SurfaceView implements SurfaceHolder.Callback {
 
     private static final String TAG = "CameraBridge";
-    private static final int MAX_UNSPECIFIED = -1;
+    protected static final int MAX_UNSPECIFIED = -1;
     private static final int STOPPED = 0;
     private static final int STARTED = 1;
 
@@ -481,6 +481,7 @@ public abstract class CameraBridgeViewBase extends SurfaceView implements Surfac
         for (Object size : supportedSizes) {
             int width = accessor.getWidth(size);
             int height = accessor.getHeight(size);
+            Log.d(TAG, "trying size: " + width + "x" + height);
 
             if (width <= maxAllowedWidth && height <= maxAllowedHeight) {
                 if (width >= calcWidth && height >= calcHeight) {
@@ -489,6 +490,13 @@ public abstract class CameraBridgeViewBase extends SurfaceView implements Surfac
                 }
             }
         }
+        if ((calcWidth == 0 || calcHeight == 0) && supportedSizes.size() > 0)
+        {
+            Log.i(TAG, "fallback to the first frame size");
+            Object size = supportedSizes.get(0);
+            calcWidth = accessor.getWidth(size);
+            calcHeight = accessor.getHeight(size);
+        }
 
         return new Size(calcWidth, calcHeight);
     }
```
modules/videoio/src/cap_v4l.cpp:

```diff
@@ -500,7 +500,8 @@ bool CvCaptureCAM_V4L::autosetup_capture_mode_v4l2()
         V4L2_PIX_FMT_JPEG,
 #endif
         V4L2_PIX_FMT_Y16,
-        V4L2_PIX_FMT_GREY
+        V4L2_PIX_FMT_Y10,
+        V4L2_PIX_FMT_GREY,
     };
 
     for (size_t i = 0; i < sizeof(try_order) / sizeof(__u32); i++) {
@@ -547,6 +548,7 @@ bool CvCaptureCAM_V4L::convertableToRgb() const
     case V4L2_PIX_FMT_SGBRG8:
     case V4L2_PIX_FMT_RGB24:
     case V4L2_PIX_FMT_Y16:
+    case V4L2_PIX_FMT_Y10:
     case V4L2_PIX_FMT_GREY:
     case V4L2_PIX_FMT_BGR24:
         return true;
@@ -581,6 +583,7 @@ void CvCaptureCAM_V4L::v4l2_create_frame()
         size.height = size.height * 3 / 2; // "1.5" channels
         break;
     case V4L2_PIX_FMT_Y16:
+    case V4L2_PIX_FMT_Y10:
         depth = IPL_DEPTH_16U;
         /* fallthru */
     case V4L2_PIX_FMT_GREY:
```
```diff
@@ -1455,6 +1458,13 @@ void CvCaptureCAM_V4L::convertToRgb(const Buffer &currentBuffer)
         cv::cvtColor(temp, destination, COLOR_GRAY2BGR);
         return;
     }
+    case V4L2_PIX_FMT_Y10:
+    {
+        cv::Mat temp(imageSize, CV_8UC1, buffers[MAX_V4L_BUFFERS].start);
+        cv::Mat(imageSize, CV_16UC1, currentBuffer.start).convertTo(temp, CV_8U, 1.0 / 4);
+        cv::cvtColor(temp, destination, COLOR_GRAY2BGR);
+        return;
+    }
     case V4L2_PIX_FMT_GREY:
         cv::cvtColor(cv::Mat(imageSize, CV_8UC1, currentBuffer.start), destination, COLOR_GRAY2BGR);
         break;
```
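V4L2_PIX_FMT_Y10 delivers 10-bit luma samples (0..1023) in 16-bit words, so the new case divides by 4 to reach the 8-bit range before the gray-to-BGR expansion. The same arithmetic in isolation, as a minimal sketch; `y10ToBgr` is a hypothetical helper, with `raw16` assumed to be a CV_16UC1 frame of Y10 data:

```cpp
#include <opencv2/imgproc.hpp>

// 10-bit samples (0..1023) stored in 16-bit words are scaled by 1/4
// into 0..255, then replicated into three channels.
cv::Mat y10ToBgr(const cv::Mat& raw16)
{
    cv::Mat gray8, bgr;
    raw16.convertTo(gray8, CV_8U, 1.0 / 4);       // 1023 / 4 ~ 255
    cv::cvtColor(gray8, bgr, cv::COLOR_GRAY2BGR); // replicate to B, G, R
    return bgr;
}
```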