mirror of https://github.com/opencv/opencv.git (synced 2025-07-29 00:33:40 +08:00)
Fix prior box generation in case of squared proposals.
Fix batch norm in training phase.
parent 0366c1b093
commit e8fe6ee4e3
@@ -335,6 +335,28 @@ public:
             }
             continue;
         }
+        else if (type == "BatchNorm")
+        {
+            if (!layerParams.get<bool>("use_global_stats", true))
+            {
+                CV_Assert(layer.bottom_size() == 1, layer.top_size() == 1);
+
+                LayerParams mvnParams;
+                mvnParams.set("eps", layerParams.get<float>("eps", 1e-5));
+                std::string mvnName = name + "/mvn";
+
+                int repetitions = layerCounter[mvnName]++;
+                if (repetitions)
+                    mvnName += String("_") + toString(repetitions);
+
+                int mvnId = dstNet.addLayer(mvnName, "MVN", mvnParams);
+                addInput(layer.bottom(0), mvnId, 0, dstNet);
+                addOutput(layer, mvnId, 0);
+                net.mutable_layer(li)->set_bottom(0, layer.top(0));
+                layerParams.blobs[0].setTo(0);  // mean
+                layerParams.blobs[1].setTo(1);  // std
+            }
+        }
 
         int id = dstNet.addLayer(name, type, layerParams);
 
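The hunk above is the heart of the batch-norm fix: when a Caffe BatchNorm layer has use_global_stats == false (training phase), normalization must use the current batch's statistics rather than the stored running mean and variance. The importer expresses this by routing the input through an inserted MVN layer and neutralizing the BatchNorm blobs (mean = 0, std = 1), so the BatchNorm stage keeps only its optional scale and shift. A minimal numeric sketch of the resulting computation (plain C++, made-up data):

    #include <cmath>
    #include <cstdio>
    #include <vector>

    int main()
    {
        const float eps = 1e-5f;                      // matches the "eps" param above
        std::vector<float> x = {1.f, 2.f, 3.f, 4.f};  // one channel, illustrative data

        // MVN stage: normalize with this batch's own statistics.
        float mean = 0.f, var = 0.f;
        for (float v : x) mean += v;
        mean /= x.size();
        for (float v : x) var += (v - mean) * (v - mean);
        var /= x.size();

        // BatchNorm stage: with blobs set to mean = 0 and std = 1 it reduces to
        // the optional scale/shift; here gamma = 1, beta = 0 (identity).
        for (float v : x)
            std::printf("%f\n", (v - mean) / std::sqrt(var + eps));
        return 0;
    }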
@@ -36,6 +36,7 @@ public:
         hasWeights = params.get<bool>("has_weight", false);
         hasBias = params.get<bool>("has_bias", false);
+        useGlobalStats = params.get<bool>("use_global_stats", true);
         if(params.get<bool>("scale_bias", false))
             hasWeights = hasBias = true;
         epsilon = params.get<float>("eps", 1E-5);
@@ -46,7 +47,7 @@ public:
                   blobs[0].type() == CV_32F && blobs[1].type() == CV_32F);
 
         float varMeanScale = 1.f;
-        if (!hasWeights && !hasBias && blobs.size() > 2) {
+        if (!hasWeights && !hasBias && blobs.size() > 2 && useGlobalStats) {
             CV_Assert(blobs.size() == 3, blobs[2].type() == CV_32F);
             varMeanScale = blobs[2].at<float>(0);
             if (varMeanScale != 0)
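Context for the changed condition: Caffe's BatchNorm keeps a scale factor in a third blob, and the true statistics are the stored mean/variance divided by that factor. With the new useGlobalStats guard, the unscaling is skipped in training mode, where the importer has already replaced the stored statistics with mean = 0 and std = 1. A small illustrative sketch (plain C++, made-up numbers):

    #include <cstdio>

    int main()
    {
        float storedMean = 10.f, storedVar = 4.f;  // values from blobs[0], blobs[1]
        float varMeanScale = 2.f;                  // blobs[2].at<float>(0)
        float alpha = varMeanScale != 0 ? 1.f / varMeanScale : 0.f;
        std::printf("mean=%g var=%g\n", storedMean * alpha, storedVar * alpha);
        return 0;
    }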
@@ -100,6 +101,8 @@ public:
                          std::vector<MatShape> &outputs,
                          std::vector<MatShape> &internals) const
     {
+        if (!useGlobalStats && inputs[0][0] != 1)
+            CV_Error(Error::StsNotImplemented, "Batch normalization in training mode with batch size > 1");
         Layer::getMemoryShapes(inputs, requiredOutputs, outputs, internals);
         return true;
     }
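The new check rejects training-mode batch normalization for batches larger than one, which this layer does not implement: inputs[0] is the NCHW shape of the first input, so inputs[0][0] is the batch size N. A hedged reading of the condition (MatShape is a std::vector<int> of dimensions):

    // Sketch: {1, 64, 56, 56} passes; {8, 64, 56, 56} triggers CV_Error in training mode.
    std::vector<int> shape = {1, 64, 56, 56};          // inputs[0]
    bool supported = useGlobalStats || shape[0] == 1;  // batch size must be 1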
@@ -304,6 +307,9 @@ public:
         }
         return flops;
     }
 
+
+private:
+    bool useGlobalStats;
 };
 
 Ptr<BatchNormLayer> BatchNormLayer::create(const LayerParams& params)
@@ -109,15 +109,11 @@ public:
             for (int i = 0; i < aspectRatioParameter.size(); ++i)
             {
                 float aspectRatio = aspectRatioParameter.get<float>(i);
-                bool alreadyExists = false;
+                bool alreadyExists = fabs(aspectRatio - 1.f) < 1e-6f;
 
-                for (size_t j = 0; j < _aspectRatios.size(); ++j)
+                for (size_t j = 0; j < _aspectRatios.size() && !alreadyExists; ++j)
                 {
-                    if (fabs(aspectRatio - _aspectRatios[j]) < 1e-6)
-                    {
-                        alreadyExists = true;
-                        break;
-                    }
+                    alreadyExists = fabs(aspectRatio - _aspectRatios[j]) < 1e-6;
                 }
                 if (!alreadyExists)
                 {
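The rewritten loop folds the flag/break pattern into boolean assignments and, more importantly, treats an aspect ratio of 1 as already present: the square min_size box is always generated, so an explicit aspect_ratio of 1.0 must not add a duplicate square prior. A self-contained sketch of the equivalent dedup logic (helper name is hypothetical):

    #include <cmath>
    #include <vector>

    // Skip ratio 1 (the square box is implicit) and any ratio already collected.
    static void addAspectRatio(std::vector<float>& ratios, float r)
    {
        bool alreadyExists = std::fabs(r - 1.f) < 1e-6f;
        for (size_t j = 0; j < ratios.size() && !alreadyExists; ++j)
            alreadyExists = std::fabs(r - ratios[j]) < 1e-6f;
        if (!alreadyExists)
            ratios.push_back(r);
    }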
@@ -215,7 +211,7 @@ public:
             }
             else
             {
-                CV_Assert(!_aspectRatios.empty(), _minSize > 0);
+                CV_Assert(_minSize > 0);
                 _boxWidths.resize(1 + (_maxSize > 0 ? 1 : 0) + _aspectRatios.size());
                 _boxHeights.resize(_boxWidths.size());
                 _boxWidths[0] = _boxHeights[0] = _minSize;
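Note on the relaxed assert: _aspectRatios may now be empty, which is exactly the squared-proposals case. From the resize call above, the number of prior boxes per location is

    num_priors = 1 + (max_size > 0 ? 1 : 0) + aspect_ratios.size()

so a model that sets only min_size produces a single square box per location (plus a second square when max_size is given).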
@@ -492,10 +488,12 @@ public:
         ieLayer->params["min_size"] = format("%f", _minSize);
         ieLayer->params["max_size"] = _maxSize > 0 ? format("%f", _maxSize) : "";
 
-        CV_Assert(!_aspectRatios.empty());
+        if (!_aspectRatios.empty())
+        {
         ieLayer->params["aspect_ratio"] = format("%f", _aspectRatios[0]);
         for (int i = 1; i < _aspectRatios.size(); ++i)
             ieLayer->params["aspect_ratio"] += format(",%f", _aspectRatios[i]);
+        }
 
         ieLayer->params["flip"] = _flip ? "1" : "0";
         ieLayer->params["clip"] = _clip ? "1" : "0";
@@ -252,6 +252,11 @@ TEST(Layer_Test_BatchNorm, Accuracy)
     testLayerUsingCaffeModels("layer_batch_norm", DNN_TARGET_CPU, true);
 }
 
+TEST(Layer_Test_BatchNorm, local_stats)
+{
+    testLayerUsingCaffeModels("layer_batch_norm_local_stats", DNN_TARGET_CPU, true, false);
+}
+
 TEST(Layer_Test_ReLU, Accuracy)
 {
     testLayerUsingCaffeModels("layer_relu");
@@ -831,4 +836,33 @@ TEST(Layer_Test_Average_pooling_kernel_area, Accuracy)
     normAssert(out, blobFromImage(target));
 }
 
+// Test PriorBoxLayer in case of no aspect ratios (just squared proposals).
+TEST(Layer_PriorBox, squares)
+{
+    LayerParams lp;
+    lp.name = "testPriorBox";
+    lp.type = "PriorBox";
+    lp.set("min_size", 32);
+    lp.set("flip", true);
+    lp.set("clip", true);
+    float variance[] = {0.1f, 0.1f, 0.2f, 0.2f};
+    float aspectRatios[] = {1.0f};  // That should be ignored.
+    lp.set("variance", DictValue::arrayReal<float*>(&variance[0], 4));
+    lp.set("aspect_ratio", DictValue::arrayReal<float*>(&aspectRatios[0], 1));
+
+    Net net;
+    int id = net.addLayerToPrev(lp.name, lp.type, lp);
+    net.connect(0, 0, id, 1);  // The second input is an input image. Shapes are used for boxes normalization.
+    Mat inp(1, 2, CV_32F);
+    randu(inp, -1, 1);
+    net.setInput(blobFromImage(inp));
+    Mat out = net.forward();
+
+    Mat target = (Mat_<float>(4, 4) << -7.75f, -15.5f, 8.25f, 16.5f,
+                                       -7.25f, -15.5f, 8.75f, 16.5f,
+                                       0.1f, 0.1f, 0.2f, 0.2f,
+                                       0.1f, 0.1f, 0.2f, 0.2f);
+    normAssert(out.reshape(1, 4), target);
+}
+
 }} // namespace
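A worked check of the test's expected values, assuming priors are centered at pixel centers and normalized by the input image's width and height: the input image is 2x1, so the two box centers are x = 0.5 and x = 1.5 with y = 0.5, and the square box is 32x32 (half-size 16). Then

    xmin = (0.5 - 16) / 2 = -7.75    xmax = (0.5 + 16) / 2 = 8.25
    xmin = (1.5 - 16) / 2 = -7.25    xmax = (1.5 + 16) / 2 = 8.75
    ymin = (0.5 - 16) / 1 = -15.5    ymax = (0.5 + 16) / 1 = 16.5

which matches the first two rows of the target; the last two rows repeat the variance {0.1, 0.1, 0.2, 0.2} once per box.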