Mirror of https://github.com/opencv/opencv.git (synced 2024-11-28 05:06:29 +08:00)

Merge pull request #9676 from jrobble:fix_caffe_swaprb
Commit aea25e7f90
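This change makes the Caffe tests and samples pass swapRB=false explicitly when building input blobs, so images stay in the BGR channel order that imread produces and that Caffe-trained models expect. At the time of this change the parameter defaulted to true, which silently swapped the input to RGB. A minimal sketch of the corrected call (file name is just a placeholder):

#include <opencv2/dnn.hpp>
#include <opencv2/imgcodecs.hpp>

int main()
{
    // imread returns an 8-bit BGR image.
    cv::Mat img = cv::imread("grace_hopper_227.png");

    // Build a 1x3x227x227 float blob without touching the channel order:
    // swapRB=false keeps BGR, which Caffe models trained on BGR data expect.
    cv::Mat blob = cv::dnn::blobFromImage(img,
                                          1.0,                // scalefactor
                                          cv::Size(227, 227), // spatial size of the blob
                                          cv::Scalar(),       // no mean subtraction
                                          false);             // swapRB
    return 0;
}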
@@ -80,12 +80,7 @@ TEST(Reproducibility_AlexNet, Accuracy)
     Mat sample = imread(_tf("grace_hopper_227.png"));
     ASSERT_TRUE(!sample.empty());

-    Size inputSize(227, 227);
-
-    if (sample.size() != inputSize)
-        resize(sample, sample, inputSize);
-
-    net.setInput(blobFromImage(sample), "data");
+    net.setInput(blobFromImage(sample, 1.0f, Size(227, 227), Scalar(), false), "data");
     Mat out = net.forward("prob");
     Mat ref = blobFromNPY(_tf("caffe_alexnet_prob.npy"));
     normAssert(ref, out);
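The manual resize block disappears because the Size argument of blobFromImage performs the resize itself (the exact resize/crop behaviour has varied slightly across OpenCV versions). A sketch of the two preparations side by side, assuming a test image on disk:

#include <opencv2/dnn.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/imgcodecs.hpp>
using namespace cv;
using namespace cv::dnn;

int main()
{
    Mat sample = imread("grace_hopper_227.png");

    // Old style: resize by hand, then rely on blobFromImage defaults
    // (which at the time included swapRB=true, i.e. an implicit BGR->RGB swap).
    Mat resized;
    resize(sample, resized, Size(227, 227));
    Mat oldBlob = blobFromImage(resized);

    // New style: one call resizes and keeps the BGR order explicitly.
    Mat newBlob = blobFromImage(sample, 1.0, Size(227, 227), Scalar(), false);
    return 0;
}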
@@ -105,17 +100,17 @@ TEST(Reproducibility_FCN, Accuracy)
     Mat sample = imread(_tf("street.png"));
     ASSERT_TRUE(!sample.empty());

-    Size inputSize(500, 500);
-    if (sample.size() != inputSize)
-        resize(sample, sample, inputSize);
-
     std::vector<int> layerIds;
     std::vector<size_t> weights, blobs;
     net.getMemoryConsumption(shape(1,3,227,227), layerIds, weights, blobs);

-    net.setInput(blobFromImage(sample), "data");
+    net.setInput(blobFromImage(sample, 1.0f, Size(500, 500), Scalar(), false), "data");
     Mat out = net.forward("score");
-    Mat ref = blobFromNPY(_tf("caffe_fcn8s_prob.npy"));
+
+    Mat refData = imread(_tf("caffe_fcn8s_prob.png"), IMREAD_ANYDEPTH);
+    int shape[] = {1, 21, 500, 500};
+    Mat ref(4, shape, CV_32FC1, refData.data);
+
     normAssert(ref, out);
 }
 #endif
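The FCN reference is no longer a .npy blob; it is read from a PNG with IMREAD_ANYDEPTH and the decoded buffer is wrapped in a 4-D Mat header. A minimal sketch of that wrapping, using the file name from the test: note the multi-dimensional Mat constructor only creates a header over existing memory, so the byte layout must really match 1x21x500x500 floats and refData must outlive ref.

#include <opencv2/core.hpp>
#include <opencv2/imgcodecs.hpp>
using namespace cv;

int main()
{
    // Load the pixel buffer without converting its depth.
    Mat refData = imread("caffe_fcn8s_prob.png", IMREAD_ANYDEPTH);
    CV_Assert(!refData.empty());

    // Reinterpret the contiguous buffer as a 1x21x500x500 float blob.
    // No data is copied: ref is only a header pointing into refData.data.
    int shape[] = {1, 21, 500, 500};
    Mat ref(4, shape, CV_32FC1, refData.data);
    return 0;
}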
@@ -136,10 +131,7 @@ TEST(Reproducibility_SSD, Accuracy)
     if (sample.channels() == 4)
         cvtColor(sample, sample, COLOR_BGRA2BGR);

-    sample.convertTo(sample, CV_32F);
-    resize(sample, sample, Size(300, 300));
-
-    Mat in_blob = blobFromImage(sample);
+    Mat in_blob = blobFromImage(sample, 1.0f, Size(300, 300), Scalar(), false);
     net.setInput(in_blob, "data");
     Mat out = net.forward("detection_out");

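The explicit convertTo(CV_32F) also becomes redundant here: blobFromImage returns a floating-point NCHW blob (CV_32F is the default output depth), so converting the input beforehand is unnecessary. A quick sketch of checking that, with a placeholder image path:

#include <opencv2/dnn.hpp>
#include <opencv2/imgcodecs.hpp>
using namespace cv;
using namespace cv::dnn;

int main()
{
    Mat sample = imread("some_image.png");   // placeholder path
    Mat in_blob = blobFromImage(sample, 1.0, Size(300, 300), Scalar(), false);

    // The blob is 4-D (N x C x H x W) and already CV_32F.
    CV_Assert(in_blob.dims == 4);
    CV_Assert(in_blob.depth() == CV_32F);
    CV_Assert(in_blob.size[2] == 300 && in_blob.size[3] == 300);
    return 0;
}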
@@ -152,7 +144,7 @@ TEST(Reproducibility_ResNet50, Accuracy)
     Net net = readNetFromCaffe(findDataFile("dnn/ResNet-50-deploy.prototxt", false),
                                findDataFile("dnn/ResNet-50-model.caffemodel", false));

-    Mat input = blobFromImage(imread(_tf("googlenet_0.png")), 1, Size(224,224));
+    Mat input = blobFromImage(imread(_tf("googlenet_0.png")), 1.0f, Size(224,224), Scalar(), false);
     ASSERT_TRUE(!input.empty());

     net.setInput(input);
@@ -167,7 +159,7 @@ TEST(Reproducibility_SqueezeNet_v1_1, Accuracy)
     Net net = readNetFromCaffe(findDataFile("dnn/squeezenet_v1.1.prototxt", false),
                                findDataFile("dnn/squeezenet_v1.1.caffemodel", false));

-    Mat input = blobFromImage(imread(_tf("googlenet_0.png")), 1, Size(227,227));
+    Mat input = blobFromImage(imread(_tf("googlenet_0.png")), 1.0f, Size(227,227), Scalar(), false);
     ASSERT_TRUE(!input.empty());

     net.setInput(input);
@@ -180,7 +172,7 @@ TEST(Reproducibility_SqueezeNet_v1_1, Accuracy)
 TEST(Reproducibility_AlexNet_fp16, Accuracy)
 {
     const float l1 = 1e-5;
-    const float lInf = 2e-4;
+    const float lInf = 3e-3;

     const string proto = findDataFile("dnn/bvlc_alexnet.prototxt", false);
     const string model = findDataFile("dnn/bvlc_alexnet.caffemodel", false);
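The looser lInf threshold absorbs the extra error from half-precision weights now that the input is fed in the correct BGR order. normAssert is a helper from the dnn test suite; the following is only a hypothetical sketch of the kind of L1 / L-infinity check it performs, not its actual implementation:

#include <opencv2/core.hpp>
using namespace cv;

// Hypothetical re-implementation for illustration only.
static void checkClose(const Mat& ref, const Mat& out, double l1, double lInf)
{
    double meanAbs = norm(ref, out, NORM_L1) / ref.total(); // average absolute difference
    double maxAbs  = norm(ref, out, NORM_INF);              // worst single element
    CV_Assert(meanAbs <= l1);
    CV_Assert(maxAbs <= lInf);
}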
@@ -190,7 +182,7 @@ TEST(Reproducibility_AlexNet_fp16, Accuracy)

     Mat sample = imread(findDataFile("dnn/grace_hopper_227.png", false));

-    net.setInput(blobFromImage(sample, 1, Size(227, 227)));
+    net.setInput(blobFromImage(sample, 1.0f, Size(227, 227), Scalar(), false));
     Mat out = net.forward();
     Mat ref = blobFromNPY(findDataFile("dnn/caffe_alexnet_prob.npy", false));
     normAssert(ref, out, "", l1, lInf);
@@ -212,7 +204,7 @@ TEST(Reproducibility_GoogLeNet_fp16, Accuracy)
     inpMats.push_back( imread(_tf("googlenet_1.png")) );
     ASSERT_TRUE(!inpMats[0].empty() && !inpMats[1].empty());

-    net.setInput(blobFromImages(inpMats), "data");
+    net.setInput(blobFromImages(inpMats, 1.0f, Size(), Scalar(), false), "data");
     Mat out = net.forward("prob");

     Mat ref = blobFromNPY(_tf("googlenet_prob.npy"));
@@ -66,7 +66,7 @@ TEST(Reproducibility_GoogLeNet, Accuracy)
     inpMats.push_back( imread(_tf("googlenet_1.png")) );
     ASSERT_TRUE(!inpMats[0].empty() && !inpMats[1].empty());

-    net.setInput(blobFromImages(inpMats), "data");
+    net.setInput(blobFromImages(inpMats, 1.0f, Size(), Scalar(), false), "data");
     Mat out = net.forward("prob");

     Mat ref = blobFromNPY(_tf("googlenet_prob.npy"));
@@ -84,7 +84,7 @@ TEST(IntermediateBlobs_GoogLeNet, Accuracy)
     blobsNames.push_back("inception_4c/1x1");
     blobsNames.push_back("inception_4c/relu_1x1");
     std::vector<Mat> outs;
-    Mat in = blobFromImage(imread(_tf("googlenet_0.png")));
+    Mat in = blobFromImage(imread(_tf("googlenet_0.png")), 1.0f, Size(), Scalar(), false);
     net.setInput(in, "data");
     net.forward(outs, blobsNames);
     CV_Assert(outs.size() == blobsNames.size());
@@ -109,7 +109,7 @@ TEST(SeveralCalls_GoogLeNet, Accuracy)
     inpMats.push_back( imread(_tf("googlenet_1.png")) );
     ASSERT_TRUE(!inpMats[0].empty() && !inpMats[1].empty());

-    net.setInput(blobFromImages(inpMats), "data");
+    net.setInput(blobFromImages(inpMats, 1.0f, Size(), Scalar(), false), "data");
     Mat out = net.forward();

     Mat ref = blobFromNPY(_tf("googlenet_prob.npy"));
@@ -118,7 +118,7 @@ TEST(SeveralCalls_GoogLeNet, Accuracy)
     std::vector<String> blobsNames;
     blobsNames.push_back("conv1/7x7_s2");
     std::vector<Mat> outs;
-    Mat in = blobFromImage(inpMats[0]);
+    Mat in = blobFromImage(inpMats[0], 1.0f, Size(), Scalar(), false);
     net.setInput(in, "data");
     net.forward(outs, blobsNames);
     CV_Assert(outs.size() == blobsNames.size());
@@ -86,7 +86,7 @@ public class MainActivity extends AppCompatActivity implements CvCameraViewListe
         // Forward image through network.
         Mat blob = Dnn.blobFromImage(frame, IN_SCALE_FACTOR,
                                      new Size(IN_WIDTH, IN_HEIGHT),
-                                     new Scalar(MEAN_VAL, MEAN_VAL, MEAN_VAL), true);
+                                     new Scalar(MEAN_VAL, MEAN_VAL, MEAN_VAL), false);
         net.setInput(blob);
         Mat detections = net.forward();

@@ -91,21 +91,26 @@ int main(int argc, char **argv)
     String modelBin = "bvlc_googlenet.caffemodel";
     String imageFile = (argc > 1) ? argv[1] : "space_shuttle.jpg";

-    //! [Read and initialize network]
-    Net net = dnn::readNetFromCaffe(modelTxt, modelBin);
-    //! [Read and initialize network]
-
-    //! [Check that network was read successfully]
-    if (net.empty())
-    {
-        std::cerr << "Can't load network by using the following files: " << std::endl;
-        std::cerr << "prototxt: " << modelTxt << std::endl;
-        std::cerr << "caffemodel: " << modelBin << std::endl;
-        std::cerr << "bvlc_googlenet.caffemodel can be downloaded here:" << std::endl;
-        std::cerr << "http://dl.caffe.berkeleyvision.org/bvlc_googlenet.caffemodel" << std::endl;
-        exit(-1);
+    Net net;
+    try {
+        //! [Read and initialize network]
+        net = dnn::readNetFromCaffe(modelTxt, modelBin);
+        //! [Read and initialize network]
+    }
+    catch (cv::Exception& e) {
+        std::cerr << "Exception: " << e.what() << std::endl;
+        //! [Check that network was read successfully]
+        if (net.empty())
+        {
+            std::cerr << "Can't load network by using the following files: " << std::endl;
+            std::cerr << "prototxt: " << modelTxt << std::endl;
+            std::cerr << "caffemodel: " << modelBin << std::endl;
+            std::cerr << "bvlc_googlenet.caffemodel can be downloaded here:" << std::endl;
+            std::cerr << "http://dl.caffe.berkeleyvision.org/bvlc_googlenet.caffemodel" << std::endl;
+            exit(-1);
+        }
+        //! [Check that network was read successfully]
     }
-    //! [Check that network was read successfully]

     //! [Prepare blob]
     Mat img = imread(imageFile);
@@ -115,9 +120,9 @@ int main(int argc, char **argv)
         exit(-1);
     }

-    //GoogLeNet accepts only 224x224 RGB-images
-    Mat inputBlob = blobFromImage(img, 1, Size(224, 224),
-                                  Scalar(104, 117, 123)); //Convert Mat to batch of images
+    //GoogLeNet accepts only 224x224 BGR-images
+    Mat inputBlob = blobFromImage(img, 1.0f, Size(224, 224),
+                                  Scalar(104, 117, 123), false); //Convert Mat to batch of images
     //! [Prepare blob]

     Mat prob;
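For this GoogLeNet call the blob preparation amounts to: resize to 224x224, subtract the mean given in BGR order (104, 117, 123), keep BGR because swapRB=false, and pack the result into a 1x3x224x224 float blob. A rough manual sketch of the same steps for illustration only; the real function also handles batching and cropping details:

#include <opencv2/dnn.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/imgcodecs.hpp>
using namespace cv;
using namespace cv::dnn;

int main()
{
    Mat img = imread("space_shuttle.jpg");        // BGR, 8-bit

    // Manual preprocessing equivalent to the parameters above.
    Mat resized, f;
    resize(img, resized, Size(224, 224));
    resized.convertTo(f, CV_32F);
    f -= Scalar(104, 117, 123);                   // mean given in B, G, R order

    // swapRB=false: no BGR->RGB conversion anywhere.
    // With empty Size()/Scalar(), this call only packs HWC -> 1x3x224x224 (NCHW).
    Mat inputBlob = blobFromImage(f, 1.0, Size(), Scalar(), false);
    return 0;
}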
@@ -113,8 +113,8 @@ int main(int argc, char **argv)
         exit(-1);
     }

-    resize(img, img, Size(500, 500)); //FCN accepts 500x500 RGB-images
-    Mat inputBlob = blobFromImage(img); //Convert Mat to batch of images
+    resize(img, img, Size(500, 500)); //FCN accepts 500x500 BGR-images
+    Mat inputBlob = blobFromImage(img, 1, Size(), Scalar(), false); //Convert Mat to batch of images
     //! [Prepare blob]

     //! [Set input blob]
@@ -11,7 +11,7 @@ def get_class_list():
     with open('synset_words.txt', 'rt') as f:
         return [x[x.find(" ") + 1:] for x in f]

-blob = dnn.blobFromImage(cv2.imread('space_shuttle.jpg'), 1, (224, 224), (104, 117, 123))
+blob = dnn.blobFromImage(cv2.imread('space_shuttle.jpg'), 1, (224, 224), (104, 117, 123), False)
 print("Input:", blob.shape, blob.dtype)

 net = dnn.readNetFromCaffe('bvlc_googlenet.prototxt', 'bvlc_googlenet.caffemodel')
@@ -41,7 +41,7 @@ if __name__ == "__main__":
     while True:
         # Capture frame-by-frame
         ret, frame = cap.read()
-        blob = cv.dnn.blobFromImage(frame, inScaleFactor, (inWidth, inHeight), meanVal)
+        blob = cv.dnn.blobFromImage(frame, inScaleFactor, (inWidth, inHeight), meanVal, False)
         net.setInput(blob)
         detections = net.forward()

@@ -27,8 +27,7 @@ if __name__ == '__main__':
         cols = frame.shape[1]
         rows = frame.shape[0]

-        net.setInput(dnn.blobFromImage(cv.resize(frame, (inWidth, inHeight)),
-                                       1.0, (inWidth, inHeight), (104., 177., 123.)))
+        net.setInput(dnn.blobFromImage(frame, 1.0, (inWidth, inHeight), (104.0, 177.0, 123.0), False))
         detections = net.forward()

         perf_stats = net.getPerfProfile()
@@ -97,7 +97,7 @@ int main(int argc, char** argv)
         //! [Prepare blob]

         Mat inputBlob = blobFromImage(frame, inScaleFactor,
-                                      Size(inWidth, inHeight), meanVal); //Convert Mat to batch of images
+                                      Size(inWidth, inHeight), meanVal, false); //Convert Mat to batch of images
         //! [Prepare blob]

         //! [Set input blob]
@@ -86,7 +86,7 @@ int main(int argc, char** argv)
     //! [Prepare blob]
     Mat preprocessedFrame = preprocess(frame);

-    Mat inputBlob = blobFromImage(preprocessedFrame); //Convert Mat to batch of images
+    Mat inputBlob = blobFromImage(preprocessedFrame, 1.0f, Size(), Scalar(), false); //Convert Mat to batch of images
     //! [Prepare blob]

     //! [Set input blob]