IVGCVSW-2643 Add Serializer & Deserializer for Activation

 * Added ActivationLayer to Schema.fbs
 * Added Activation serialization and deserialization support
 * Added serialization and deserialization unit tests

Change-Id: Ib5df45f123674988b994ffe3f111d3fb57864912
Signed-off-by: Mike Kelly <mike.kelly@arm.com>
diff --git a/src/armnnDeserializer/Deserializer.cpp b/src/armnnDeserializer/Deserializer.cpp
index 56a6570..2462061 100644
--- a/src/armnnDeserializer/Deserializer.cpp
+++ b/src/armnnDeserializer/Deserializer.cpp
@@ -170,6 +170,7 @@
 m_ParserFunctions(Layer_MAX+1, &Deserializer::ParseUnsupportedLayer)
 {
     // register supported layers
+    m_ParserFunctions[Layer_ActivationLayer]             = &Deserializer::ParseActivation;
     m_ParserFunctions[Layer_AdditionLayer]               = &Deserializer::ParseAdd;
     m_ParserFunctions[Layer_Convolution2dLayer]          = &Deserializer::ParseConvolution2d;
     m_ParserFunctions[Layer_DepthwiseConvolution2dLayer] = &Deserializer::ParseDepthwiseConvolution2d;
@@ -185,6 +186,8 @@
 
     switch(layerType)
     {
+        case Layer::Layer_ActivationLayer:
+            return graphPtr->layers()->Get(layerIndex)->layer_as_ActivationLayer()->base();
         case Layer::Layer_AdditionLayer:
             return graphPtr->layers()->Get(layerIndex)->layer_as_AdditionLayer()->base();
         case Layer::Layer_Convolution2dLayer:
@@ -238,6 +241,33 @@
     }
 }
 
+// Translates a serialized ActivationFunction enum value into the equivalent
+// armnn::ActivationFunction. Unrecognised values fall back to Sigmoid, the
+// schema default for ActivationDescriptor::function.
+armnn::ActivationFunction ToActivationFunction(armnnSerializer::ActivationFunction function)
+{
+    switch (function)
+    {
+        case armnnSerializer::ActivationFunction_Sigmoid:
+            return armnn::ActivationFunction::Sigmoid;
+        case armnnSerializer::ActivationFunction_TanH:
+            return armnn::ActivationFunction::TanH;
+        case armnnSerializer::ActivationFunction_Linear:
+            return armnn::ActivationFunction::Linear;
+        case armnnSerializer::ActivationFunction_ReLu:
+            return armnn::ActivationFunction::ReLu;
+        case armnnSerializer::ActivationFunction_BoundedReLu:
+            return armnn::ActivationFunction::BoundedReLu;
+        // SoftReLu is declared in the schema (SoftReLu = 5); without this case
+        // it would silently deserialize as Sigmoid via the default branch.
+        case armnnSerializer::ActivationFunction_SoftReLu:
+            return armnn::ActivationFunction::SoftReLu;
+        case armnnSerializer::ActivationFunction_LeakyReLu:
+            return armnn::ActivationFunction::LeakyReLu;
+        case armnnSerializer::ActivationFunction_Abs:
+            return armnn::ActivationFunction::Abs;
+        case armnnSerializer::ActivationFunction_Sqrt:
+            return armnn::ActivationFunction::Sqrt;
+        case armnnSerializer::ActivationFunction_Square:
+            return armnn::ActivationFunction::Square;
+        default:
+            return armnn::ActivationFunction::Sigmoid;
+    }
+}
+
 armnn::TensorInfo ToTensorInfo(Deserializer::TensorRawPtr tensorPtr)
 {
     armnn::DataType type;
@@ -645,6 +675,35 @@
     slots.outputSlot = slot;
 }
 
+// Deserializes an ActivationLayer: reads the serialized ActivationDescriptor
+// (function, a, b) back into an armnn descriptor and adds the equivalent
+// layer to the network under construction.
+void Deserializer::ParseActivation(unsigned int layerIndex)
+{
+    CHECK_LAYERS(m_Graph, 0, layerIndex);
+    // An activation layer has exactly one input and one output slot.
+    auto inputs = GetInputs(m_Graph, layerIndex);
+    CHECK_LOCATION();
+    CHECK_VALID_SIZE(inputs.size(), 1);
+
+    auto outputs = GetOutputs(m_Graph, layerIndex);
+    CHECK_VALID_SIZE(outputs.size(), 1);
+
+    // The layer name is regenerated from the index; the serialized name is not reused here.
+    auto layerName = boost::str(boost::format("Activation:%1%") % layerIndex);
+
+    auto serializerLayer = m_Graph->layers()->Get(layerIndex)->layer_as_ActivationLayer();
+    auto serializerDescriptor = serializerLayer->descriptor();
+
+    // Rebuild the armnn descriptor from its FlatBuffer counterpart.
+    armnn::ActivationDescriptor descriptor;
+    descriptor.m_Function = ToActivationFunction(serializerDescriptor->function());
+    descriptor.m_A = serializerDescriptor->a();
+    descriptor.m_B = serializerDescriptor->b();
+
+    IConnectableLayer* layer = m_Network->AddActivationLayer(descriptor,
+                                                             layerName.c_str());
+    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
+    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
+
+    // Wire the new layer's slots into the deserializer's connection bookkeeping.
+    RegisterInputSlots(layerIndex, layer);
+    RegisterOutputSlots(layerIndex, layer);
+}
+
 void Deserializer::ParseAdd(unsigned int layerIndex)
 {
     CHECK_LAYERS(m_Graph, 0, layerIndex);
diff --git a/src/armnnDeserializer/Deserializer.hpp b/src/armnnDeserializer/Deserializer.hpp
index a66508a..bf78e10 100644
--- a/src/armnnDeserializer/Deserializer.hpp
+++ b/src/armnnDeserializer/Deserializer.hpp
@@ -68,6 +68,7 @@
     using LayerParsingFunction = void(Deserializer::*)(unsigned int layerIndex);
 
     void ParseUnsupportedLayer(unsigned int layerIndex);
+    void ParseActivation(unsigned int layerIndex);
     void ParseAdd(unsigned int layerIndex);
     void ParseConvolution2d(unsigned int layerIndex);
     void ParseDepthwiseConvolution2d(unsigned int layerIndex);
diff --git a/src/armnnDeserializer/test/DeserializeActivation.cpp b/src/armnnDeserializer/test/DeserializeActivation.cpp
new file mode 100644
index 0000000..ad03dd6
--- /dev/null
+++ b/src/armnnDeserializer/test/DeserializeActivation.cpp
@@ -0,0 +1,178 @@
+//
+// Copyright © 2017 Arm Ltd. All rights reserved.
+// SPDX-License-Identifier: MIT
+//
+
+#include <boost/test/unit_test.hpp>
+#include "ParserFlatbuffersSerializeFixture.hpp"
+#include "../Deserializer.hpp"
+
+#include <string>
+#include <iostream>
+
+BOOST_AUTO_TEST_SUITE(DeserializeParser)
+
+// Builds a serialized three-layer graph (Input -> Activation -> Output) in
+// FlatBuffers JSON form. Shapes, data type and activation parameters are
+// spliced in as strings so each derived fixture can vary them.
+struct ActivationFixture : public ParserFlatbuffersSerializeFixture
+{
+    explicit ActivationFixture(const std::string& inputShape,
+                               const std::string& outputShape,
+                               const std::string& dataType,
+                               const std::string& activationType="Sigmoid",
+                               const std::string& a = "0.0",
+                               const std::string& b = "0.0")
+    {
+        m_JsonString = R"(
+        {
+            inputIds: [0],
+            outputIds: [2],
+            layers: [{
+                layer_type: "InputLayer",
+                layer: {
+                    base: {
+                        layerBindingId: 0,
+                        base: {
+                            index: 0,
+                            layerName: "InputLayer",
+                            layerType: "Input",
+                            inputSlots: [{
+                                index: 0,
+                                connection: {sourceLayerIndex:0, outputSlotIndex:0 },
+                            }],
+                            outputSlots: [{
+                                index: 0,
+                                tensorInfo: {
+                                    dimensions: )" + inputShape + R"(,
+                                    dataType: )" + dataType + R"(
+                                },
+                            }],
+                        },
+                    }
+                },
+            },
+            {
+                layer_type: "ActivationLayer",
+                layer : {
+                    base: {
+                        index:1,
+                        layerName: "ActivationLayer",
+                        layerType: "Activation",
+                        inputSlots: [{
+                            index: 0,
+                            connection: {sourceLayerIndex:0, outputSlotIndex:0 },
+                        }],
+                        outputSlots: [{
+                            index: 0,
+                            tensorInfo: {
+                                dimensions: )" + outputShape + R"(,
+                                dataType: )" + dataType + R"(
+                            },
+                        }],
+                    },
+                    descriptor: {
+                        a: )" + a + R"(,
+                        b: )" + b + R"(,
+                        function: )" + activationType + R"(
+                    },
+                },
+            },
+            {
+                layer_type: "OutputLayer",
+                layer: {
+                    base:{
+                        layerBindingId: 2,
+                        base: {
+                            index: 2,
+                            layerName: "OutputLayer",
+                            layerType: "Output",
+                            inputSlots: [{
+                                index: 0,
+                                connection: {sourceLayerIndex:1, outputSlotIndex:0 },
+                            }],
+                            outputSlots: [{
+                                index: 0,
+                                tensorInfo: {
+                                    dimensions: )" + outputShape + R"(,
+                                    dataType: )" + dataType + R"(
+                                },
+                            }],
+                        }
+                    }
+                },
+            }]
+        }
+        )";
+        Setup();
+    }
+};
+
+// ReLu over quantised (QAsymm8) data; a/b keep their 0.0 defaults.
+struct SimpleActivationFixture : ActivationFixture
+{
+    SimpleActivationFixture() : ActivationFixture("[1, 2, 2, 1]",
+                                                  "[1, 2, 2, 1]",
+                                                  "QuantisedAsymm8",
+                                                  "ReLu") {}
+};
+
+// ReLu over Float32 data; a/b keep their 0.0 defaults.
+struct SimpleActivationFixture2 : ActivationFixture
+{
+    SimpleActivationFixture2() : ActivationFixture("[1, 2, 2, 1]",
+                                                   "[1, 2, 2, 1]",
+                                                   "Float32",
+                                                   "ReLu") {}
+};
+
+// BoundedReLu over QAsymm8 data with a=5.0 (upper bound), b=0.0.
+struct SimpleActivationFixture3 : ActivationFixture
+{
+    SimpleActivationFixture3() : ActivationFixture("[1, 2, 2, 1]",
+                                                   "[1, 2, 2, 1]",
+                                                   "QuantisedAsymm8",
+                                                   "BoundedReLu",
+                                                   "5.0",
+                                                   "0.0") {}
+};
+
+// BoundedReLu over Float32 data with a=5.0 (upper bound), b=0.0.
+struct SimpleActivationFixture4 : ActivationFixture
+{
+    SimpleActivationFixture4() : ActivationFixture("[1, 2, 2, 1]",
+                                                   "[1, 2, 2, 1]",
+                                                   "Float32",
+                                                   "BoundedReLu",
+                                                   "5.0",
+                                                   "0.0") {}
+};
+
+
+// ReLu, QAsymm8: every input value is non-negative, so output equals input.
+BOOST_FIXTURE_TEST_CASE(ActivationReluQuantisedAsymm8, SimpleActivationFixture)
+{
+    RunTest<4, armnn::DataType::QuantisedAsymm8>(
+            0,
+            {{"InputLayer", {10, 0, 2, 0}}},
+            {{"OutputLayer", {10, 0, 2, 0}}});
+}
+
+// ReLu, Float32: the negative input (-85) is clamped to 0, others pass through.
+BOOST_FIXTURE_TEST_CASE(ActivationReluFloat32, SimpleActivationFixture2)
+{
+    RunTest<4, armnn::DataType::Float32>(
+            0,
+            {{"InputLayer", {111, -85, 226, 3}}},
+            {{"OutputLayer", {111, 0, 226, 3}}});
+}
+
+
+// BoundedReLu (a=5), QAsymm8: values above 5 are clamped to 5.
+BOOST_FIXTURE_TEST_CASE(ActivationBoundedReluQuantisedAsymm8, SimpleActivationFixture3)
+{
+    RunTest<4, armnn::DataType::QuantisedAsymm8>(
+            0,
+            {{"InputLayer", {10, 0, 2, 0}}},
+            {{"OutputLayer", {5, 0, 2, 0}}});
+}
+
+// BoundedReLu (a=5), Float32: output is clamped into [0, 5].
+BOOST_FIXTURE_TEST_CASE(ActivationBoundedReluFloat32, SimpleActivationFixture4)
+{
+    RunTest<4, armnn::DataType::Float32>(
+            0,
+            {{"InputLayer", {111, -85, 226, 3}}},
+            {{"OutputLayer", {5, 0, 5, 3}}});
+}
+
+BOOST_AUTO_TEST_SUITE_END()
diff --git a/src/armnnSerializer/Schema.fbs b/src/armnnSerializer/Schema.fbs
index 1b7427b..e813651 100644
--- a/src/armnnSerializer/Schema.fbs
+++ b/src/armnnSerializer/Schema.fbs
@@ -9,6 +9,19 @@
 
 file_extension "armnn";
 
+// Activation functions available to ActivationLayer. NOTE(review): values are
+// presumably intended to stay in sync with armnn::ActivationFunction — keep
+// the two enums aligned when adding entries.
+enum ActivationFunction : byte {
+    Sigmoid = 0,
+    TanH = 1,
+    Linear = 2,
+    ReLu = 3,
+    BoundedReLu = 4,
+    SoftReLu = 5,
+    LeakyReLu = 6,
+    Abs = 7,
+    Sqrt = 8,
+    Square = 9
+}
+
 enum DataType : byte {
     Float16 = 0,
     Float32 = 1,
@@ -76,7 +89,8 @@
     Reshape = 5,
     Softmax = 6,
     Convolution2d = 7,
-    DepthwiseConvolution2d = 8
+    DepthwiseConvolution2d = 8,
+    Activation = 9
 }
 
 // Base layer table to be used as part of other layers
@@ -94,6 +108,17 @@
 }
 
 // Table for each layer defined below
+// An activation layer: common base-layer info plus the function descriptor.
+table ActivationLayer {
+    base:LayerBase;
+    descriptor:ActivationDescriptor;
+}
+
+// Activation parameters; 'a' and 'b' are function-dependent constants
+// (e.g. 'a' is the upper bound for BoundedReLu — see the serializer tests).
+table ActivationDescriptor {
+    function:ActivationFunction = Sigmoid;
+    a:float;
+    b:float;
+}
+
 table AdditionLayer {
     base:LayerBase;
 }
@@ -201,6 +226,7 @@
 }
 
 union Layer {
+    ActivationLayer,
     AdditionLayer,
     Convolution2dLayer,
     DepthwiseConvolution2dLayer,
diff --git a/src/armnnSerializer/Serializer.cpp b/src/armnnSerializer/Serializer.cpp
index b85c45a..bee1a3c 100644
--- a/src/armnnSerializer/Serializer.cpp
+++ b/src/armnnSerializer/Serializer.cpp
@@ -22,6 +22,33 @@
 namespace armnnSerializer
 {
 
+// Translates an armnn::ActivationFunction into its FlatBuffer schema
+// counterpart. Unhandled values fall back to Sigmoid, the schema default.
+serializer::ActivationFunction GetFlatBufferActivationFunction(armnn::ActivationFunction function)
+{
+    switch (function)
+    {
+        case armnn::ActivationFunction::Sigmoid:
+            return serializer::ActivationFunction::ActivationFunction_Sigmoid;
+        case armnn::ActivationFunction::TanH:
+            return serializer::ActivationFunction::ActivationFunction_TanH;
+        case armnn::ActivationFunction::Linear:
+            return serializer::ActivationFunction::ActivationFunction_Linear;
+        case armnn::ActivationFunction::ReLu:
+            return serializer::ActivationFunction::ActivationFunction_ReLu;
+        case armnn::ActivationFunction::BoundedReLu:
+            return serializer::ActivationFunction::ActivationFunction_BoundedReLu;
+        // SoftReLu exists in both enums; without this case a SoftReLu layer
+        // would be serialized as Sigmoid via the default branch.
+        case armnn::ActivationFunction::SoftReLu:
+            return serializer::ActivationFunction::ActivationFunction_SoftReLu;
+        case armnn::ActivationFunction::LeakyReLu:
+            return serializer::ActivationFunction::ActivationFunction_LeakyReLu;
+        case armnn::ActivationFunction::Abs:
+            return serializer::ActivationFunction::ActivationFunction_Abs;
+        case armnn::ActivationFunction::Sqrt:
+            return serializer::ActivationFunction::ActivationFunction_Sqrt;
+        case armnn::ActivationFunction::Square:
+            return serializer::ActivationFunction::ActivationFunction_Square;
+        default:
+            return serializer::ActivationFunction::ActivationFunction_Sigmoid;
+    }
+}
+
 uint32_t SerializerVisitor::GetSerializedId(unsigned int guid)
 {
     std::pair<unsigned int, uint32_t> guidPair(guid, m_layerId);
@@ -78,6 +105,29 @@
     CreateAnyLayer(flatBufferOutputLayer.o, serializer::Layer::Layer_OutputLayer);
 }
 
+// Build FlatBuffer for Activation Layer
+void SerializerVisitor::VisitActivationLayer(const armnn::IConnectableLayer* layer,
+                                             const armnn::ActivationDescriptor& descriptor,
+                                             const char* name)
+{
+    // Create FlatBuffer BaseLayer
+    auto flatBufferBaseLayer = CreateLayerBase(layer, serializer::LayerType::LayerType_Activation);
+
+    // Create the FlatBuffer ActivationDescriptor
+    auto flatBufferDescriptor = CreateActivationDescriptor(m_flatBufferBuilder,
+                                                           GetFlatBufferActivationFunction(descriptor.m_Function),
+                                                           descriptor.m_A,
+                                                           descriptor.m_B);
+
+    // Create the FlatBuffer ActivationLayer (was misnamed 'flatBufferAdditionLayer',
+    // a copy-paste remnant from VisitAdditionLayer)
+    auto flatBufferActivationLayer = CreateActivationLayer(m_flatBufferBuilder,
+                                                           flatBufferBaseLayer,
+                                                           flatBufferDescriptor);
+
+    // Add the AnyLayer to the FlatBufferLayers
+    CreateAnyLayer(flatBufferActivationLayer.o, serializer::Layer::Layer_ActivationLayer);
+}
+
 // Build FlatBuffer for Addition Layer
 void SerializerVisitor::VisitAdditionLayer(const armnn::IConnectableLayer* layer, const char* name)
 {
diff --git a/src/armnnSerializer/Serializer.hpp b/src/armnnSerializer/Serializer.hpp
index aa765a2..0c442e0 100644
--- a/src/armnnSerializer/Serializer.hpp
+++ b/src/armnnSerializer/Serializer.hpp
@@ -42,6 +42,10 @@
         return m_serializedLayers;
     }
 
+    void VisitActivationLayer(const armnn::IConnectableLayer* layer,
+                              const armnn::ActivationDescriptor& descriptor,
+                              const char* name = nullptr) override;
+
     void VisitAdditionLayer(const armnn::IConnectableLayer* layer,
                             const char* name = nullptr) override;
 
diff --git a/src/armnnSerializer/test/ActivationSerializationTests.cpp b/src/armnnSerializer/test/ActivationSerializationTests.cpp
new file mode 100644
index 0000000..c20f286
--- /dev/null
+++ b/src/armnnSerializer/test/ActivationSerializationTests.cpp
@@ -0,0 +1,78 @@
+//
+// Copyright © 2017 Arm Ltd. All rights reserved.
+// SPDX-License-Identifier: MIT
+//
+
+#include <armnnDeserializer/IDeserializer.hpp>
+#include <armnn/ArmNN.hpp>
+#include <armnn/INetwork.hpp>
+#include "../Serializer.hpp"
+#include <sstream>
+#include <boost/test/unit_test.hpp>
+
+BOOST_AUTO_TEST_SUITE(SerializerTests)
+
+// Round-trip test: build a network with a ReLu activation, serialize it,
+// deserialize it, run it on CpuRef and check the ReLu output values.
+BOOST_AUTO_TEST_CASE(ActivationSerialization)
+{
+    armnnDeserializer::IDeserializerPtr parser = armnnDeserializer::IDeserializer::Create();
+
+    // NOTE(review): quantization scales (1.0f / 4.0f) are supplied but the
+    // tensors are Float32 — confirm they are intentionally irrelevant here.
+    armnn::TensorInfo inputInfo(armnn::TensorShape({1, 2, 2, 1}), armnn::DataType::Float32, 1.0f, 0);
+    armnn::TensorInfo outputInfo(armnn::TensorShape({1, 2, 2, 1}), armnn::DataType::Float32, 4.0f, 0);
+
+    // Construct network
+    armnn::INetworkPtr network = armnn::INetwork::Create();
+
+    armnn::ActivationDescriptor descriptor;
+    descriptor.m_Function = armnn::ActivationFunction::ReLu;
+    descriptor.m_A = 0;
+    descriptor.m_B = 0;
+
+    armnn::IConnectableLayer* const inputLayer      = network->AddInputLayer(0, "input");
+    armnn::IConnectableLayer* const activationLayer = network->AddActivationLayer(descriptor, "activation");
+    armnn::IConnectableLayer* const outputLayer     = network->AddOutputLayer(0, "output");
+
+    inputLayer->GetOutputSlot(0).Connect(activationLayer->GetInputSlot(0));
+    inputLayer->GetOutputSlot(0).SetTensorInfo(inputInfo);
+
+    activationLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
+    activationLayer->GetOutputSlot(0).SetTensorInfo(outputInfo);
+
+    // Serialize the network to a byte stream, then feed it back to the parser.
+    armnnSerializer::Serializer serializer;
+    serializer.Serialize(*network);
+
+    std::stringstream stream;
+    serializer.SaveSerializedToStream(stream);
+
+    std::string const serializerString{stream.str()};
+    std::vector<std::uint8_t> const serializerVector{serializerString.begin(), serializerString.end()};
+
+    armnn::INetworkPtr deserializedNetwork = parser->CreateNetworkFromBinary(serializerVector);
+
+    armnn::IRuntime::CreationOptions options; // default options
+    armnn::IRuntimePtr run = armnn::IRuntime::Create(options);
+    auto deserializedOptimized = Optimize(*deserializedNetwork, { armnn::Compute::CpuRef }, run->GetDeviceSpec());
+
+    armnn::NetworkId networkIdentifier;
+
+    // Load graph into runtime
+    run->LoadNetwork(networkIdentifier, std::move(deserializedOptimized));
+
+    std::vector<float> inputData {0.0f, -5.3f, 42.0f, -42.0f};
+    armnn::InputTensors inputTensors
+    {
+        {0, armnn::ConstTensor(run->GetInputTensorInfo(networkIdentifier, 0), inputData.data())}
+    };
+
+    // ReLu clamps the negative inputs to zero.
+    std::vector<float> expectedOutputData {0.0f, 0.0f, 42.0f, 0.0f};
+
+    std::vector<float> outputData(4);
+    armnn::OutputTensors outputTensors
+    {
+        {0, armnn::Tensor(run->GetOutputTensorInfo(networkIdentifier, 0), outputData.data())}
+    };
+    run->EnqueueWorkload(networkIdentifier, inputTensors, outputTensors);
+    BOOST_CHECK_EQUAL_COLLECTIONS(outputData.begin(), outputData.end(),
+    expectedOutputData.begin(), expectedOutputData.end());
+}
+
+BOOST_AUTO_TEST_SUITE_END()