Make ONNX parser support TanH / Sigmoid / LeakyRelu layers

Signed-off-by: Jung Tae-young <tee.ty.jung@openedges.com>
Change-Id: I44d24b525b78b8d3fee0197abda7bd667eb04d83
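
The diff below only touches OnnxParser.cpp; the handlers it wires into the operator table also need matching declarations in the parser header. A minimal sketch, assuming the companion header is src/armnnOnnxParser/OnnxParser.hpp (that file is not part of this diff):

    // OnnxParser.hpp -- companion declarations (sketch only),
    // placed inside class OnnxParser alongside the other Parse* members.
    void ParseActivation(const onnx::NodeProto& nodeProto, const armnn::ActivationFunction func);
    void ParseSigmoid(const onnx::NodeProto& nodeProto);
    void ParseTanh(const onnx::NodeProto& nodeProto);
    void ParseRelu(const onnx::NodeProto& nodeProto);
    void ParseLeakyRelu(const onnx::NodeProto& nodeProto);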
diff --git a/src/armnnOnnxParser/OnnxParser.cpp b/src/armnnOnnxParser/OnnxParser.cpp
index 9d374ae..0d0cc25 100644
--- a/src/armnnOnnxParser/OnnxParser.cpp
+++ b/src/armnnOnnxParser/OnnxParser.cpp
@@ -337,7 +337,10 @@
     { "Constant",              &OnnxParser::ParseConstant },
     { "MaxPool",               &OnnxParser::ParseMaxPool },
     { "Reshape",               &OnnxParser::ParseReshape },
+    { "Sigmoid",               &OnnxParser::ParseSigmoid },
+    { "Tanh",                  &OnnxParser::ParseTanh },
     { "Relu",                  &OnnxParser::ParseRelu },
+    { "LeakyRelu",             &OnnxParser::ParseLeakyRelu },
     { "Conv",                  &OnnxParser::ParseConv },
     { "Add",                   &OnnxParser::ParseAdd },
 };
@@ -1083,7 +1086,7 @@
     }
 }
 
-void OnnxParser::ParseRelu(const onnx::NodeProto& node)
+void OnnxParser::ParseActivation(const onnx::NodeProto& node, const armnn::ActivationFunction func)
 {
     CHECK_VALID_SIZE(static_cast<size_t>(node.input_size()), 1);
     CHECK_VALID_SIZE(static_cast<size_t>(node.output_size()), 1);
@@ -1091,7 +1094,7 @@
     VALID_INPUTS(node, STR_LIST(onnx::TensorProto::FLOAT));
 
     ActivationDescriptor desc;
-    desc.m_Function = ActivationFunction::ReLu;
+    desc.m_Function = func;
 
     IConnectableLayer* const layer = m_Network->AddActivationLayer(desc, node.name().c_str());
     BOOST_ASSERT(layer != nullptr);
@@ -1107,6 +1110,25 @@
     RegisterOutputSlots(layer, {node.output(0)});
 }
 
+void OnnxParser::ParseSigmoid(const onnx::NodeProto& node)
+{
+    ParseActivation(node, ActivationFunction::Sigmoid);
+}
+
+void OnnxParser::ParseTanh(const onnx::NodeProto& node)
+{
+    ParseActivation(node, ActivationFunction::TanH);
+}
+
+void OnnxParser::ParseRelu(const onnx::NodeProto& node)
+{
+    ParseActivation(node, ActivationFunction::ReLu);
+}
+
+void OnnxParser::ParseLeakyRelu(const onnx::NodeProto& node)
+{
+    ParseActivation(node, ActivationFunction::LeakyReLu);
+}
 
 void OnnxParser::AddConvLayerWithDepthwiseConv(const onnx::NodeProto& node, const Convolution2dDescriptor& convDesc)
 {
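
For illustration, the kind of ONNX node that the new table entries dispatch can be built with the generated ONNX protobuf C++ API roughly as below; the include path and all node/tensor names here are assumptions for the sketch, not part of this change:

    #include <onnx/onnx.pb.h>  // generated ONNX protobufs; adjust to how the build pulls them in

    // Sketch: a minimal NodeProto whose op_type ("Tanh") is looked up in the
    // operator table above and routed to OnnxParser::ParseTanh.
    onnx::NodeProto MakeTanhNode()
    {
        onnx::NodeProto node;
        node.set_op_type("Tanh");          // table key added in this change
        node.set_name("tanh_node");        // illustrative node name
        node.add_input("activation_in");   // illustrative tensor names
        node.add_output("activation_out");
        return node;
    }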