Fix LogSoftmax for ONNX

Fix wrong indentation as well while at it
dianlujitao 2019-05-20 15:46:09 +08:00
parent 447116a93c
commit f0f50b757d
2 changed files with 32 additions and 21 deletions

modules/dnn/src/onnx/onnx_importer.cpp

@ -786,6 +786,11 @@ void ONNXImporter::populateNet(Net dstNet)
             }
             replaceLayerParam(layerParams, "mode", "interpolation");
         }
+        else if (layer_type == "LogSoftmax")
+        {
+            layerParams.type = "Softmax";
+            layerParams.set("log_softmax", true);
+        }
         else
         {
             for (int j = 0; j < node_proto.input_size(); j++) {
@ -816,7 +821,7 @@ void ONNXImporter::populateNet(Net dstNet)
         CV_Assert(!layerOutShapes.empty());
         outShapes[layerParams.name] = layerOutShapes[0];
     }
- }
+}
 
 Net readNetFromONNX(const String& onnxFile)
 {
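
The mapping above works because LogSoftmax is, by definition, the elementwise logarithm of Softmax, so the importer can reuse OpenCV's existing Softmax layer and merely set its log_softmax flag. A minimal standalone sketch of that identity (plain C++, not OpenCV code), using the usual max-subtraction trick for numerical stability:

#include <algorithm>
#include <cmath>
#include <cstdio>
#include <vector>

int main()
{
    const std::vector<double> x = {1.0, 2.0, 3.0};

    // Subtracting the max keeps exp() from overflowing; the shift
    // cancels out in the final expression.
    const double maxv = *std::max_element(x.begin(), x.end());
    double sum = 0.0;
    for (double v : x)
        sum += std::exp(v - maxv);

    // log(softmax(x)[i]) = (x[i] - maxv) - log(sum_j exp(x[j] - maxv))
    for (double v : x)
        std::printf("log_softmax = %f\n", (v - maxv) - std::log(sum));
    return 0;
}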

modules/dnn/test/test_onnx_importer.cpp

@ -245,6 +245,12 @@ TEST_P(Test_ONNX_layers, Reshape)
testONNXModels("unsqueeze");
}
TEST_P(Test_ONNX_layers, Softmax)
{
testONNXModels("softmax");
testONNXModels("log_softmax");
}
INSTANTIATE_TEST_CASE_P(/*nothing*/, Test_ONNX_layers, dnnBackendsAndTargets());
class Test_ONNX_nets : public Test_ONNX_layers {};
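
With both changes in place, an ONNX model containing a LogSoftmax node should load and run through cv::dnn like any other model. A hypothetical usage sketch (the file name "model_with_logsoftmax.onnx" and the 1x3 input shape are placeholders, not artifacts of this commit):

#include <iostream>
#include <opencv2/core.hpp>
#include <opencv2/dnn.hpp>

int main()
{
    // Placeholder path: any ONNX model whose final node is LogSoftmax.
    cv::dnn::Net net = cv::dnn::readNetFromONNX("model_with_logsoftmax.onnx");

    cv::Mat input = (cv::Mat_<float>(1, 3) << 1.f, 2.f, 3.f);
    net.setInput(input);

    // The LogSoftmax node is imported as a Softmax layer with
    // log_softmax=true, so the output contains log-probabilities.
    cv::Mat out = net.forward();
    std::cout << out << std::endl;
    return 0;
}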