IVGCVSW-2700 Serialize/de-serialize the Normalization layer

Change-Id: Ib307ec6c28beb6c158d337678e67a2484c495a06
Signed-off-by: Nina Drozd <nina.drozd@arm.com>
diff --git a/CMakeLists.txt b/CMakeLists.txt
index a078d73..a21b5f2 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -604,6 +604,7 @@
                 src/armnnDeserializer/test/DeserializeEqual.cpp
                 src/armnnDeserializer/test/DeserializeFullyConnected.cpp
                 src/armnnDeserializer/test/DeserializeMultiplication.cpp
+                src/armnnDeserializer/test/DeserializeNormalization.cpp
                 src/armnnDeserializer/test/DeserializePermute.cpp
                 src/armnnDeserializer/test/DeserializePooling2d.cpp
                 src/armnnDeserializer/test/DeserializeReshape.cpp
diff --git a/src/armnnDeserializer/Deserializer.cpp b/src/armnnDeserializer/Deserializer.cpp
index 076b23e..c7049f6 100644
--- a/src/armnnDeserializer/Deserializer.cpp
+++ b/src/armnnDeserializer/Deserializer.cpp
@@ -197,6 +197,7 @@
     m_ParserFunctions[Layer_MinimumLayer]                = &Deserializer::ParseMinimum;
     m_ParserFunctions[Layer_MaximumLayer]                = &Deserializer::ParseMaximum;
     m_ParserFunctions[Layer_MultiplicationLayer]         = &Deserializer::ParseMultiplication;
+    m_ParserFunctions[Layer_NormalizationLayer]          = &Deserializer::ParseNormalization;
     m_ParserFunctions[Layer_PermuteLayer]                = &Deserializer::ParsePermute;
     m_ParserFunctions[Layer_Pooling2dLayer]              = &Deserializer::ParsePooling2d;
     m_ParserFunctions[Layer_ReshapeLayer]                = &Deserializer::ParseReshape;
@@ -236,6 +237,8 @@
             return graphPtr->layers()->Get(layerIndex)->layer_as_MaximumLayer()->base();
         case Layer::Layer_MultiplicationLayer:
             return graphPtr->layers()->Get(layerIndex)->layer_as_MultiplicationLayer()->base();
+        case Layer::Layer_NormalizationLayer:
+            return graphPtr->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->base();
         case Layer::Layer_OutputLayer:
             return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->base();
         case Layer::Layer_PermuteLayer:
@@ -1360,4 +1363,98 @@
     RegisterOutputSlots(graph, layerIndex, layer);
 }
 
+armnn::NormalizationDescriptor Deserializer::GetNormalizationDescriptor(
+    Deserializer::NormalizationDescriptorPtr normalizationDescriptor,
+    unsigned int layerIndex)
+{
+    armnn::NormalizationDescriptor desc;
+
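+    // Map the serialized enum values onto their armnn equivalents.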
+    switch (normalizationDescriptor->normChannelType())
+    {
+        case NormalizationAlgorithmChannel_Across:
+        {
+            desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Across;
+            break;
+        }
+        case NormalizationAlgorithmChannel_Within:
+        {
+            desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Within;
+            break;
+        }
+        default:
+        {
+            BOOST_ASSERT_MSG(false, "Unsupported normalization channel type");
+        }
+    }
+
+    switch (normalizationDescriptor->normMethodType())
+    {
+        case NormalizationAlgorithmMethod_LocalBrightness:
+        {
+            desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalBrightness;
+            break;
+        }
+        case NormalizationAlgorithmMethod_LocalContrast:
+        {
+            desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalContrast;
+            break;
+        }
+        default:
+        {
+            BOOST_ASSERT_MSG(false, "Unsupported normalization method type");
+        }
+    }
+
+    switch (normalizationDescriptor->dataLayout())
+    {
+        case DataLayout_NCHW:
+        {
+            desc.m_DataLayout = armnn::DataLayout::NCHW;
+            break;
+        }
+        case DataLayout_NHWC:
+        {
+            desc.m_DataLayout = armnn::DataLayout::NHWC;
+            break;
+        }
+        default:
+        {
+            BOOST_ASSERT_MSG(false, "Unsupported data layout");
+        }
+    }
+
+    desc.m_Alpha    = normalizationDescriptor->alpha();
+    desc.m_Beta     = normalizationDescriptor->beta();
+    desc.m_K        = normalizationDescriptor->k();
+    desc.m_NormSize = normalizationDescriptor->normSize();
+
+    return desc;
+}
+
+void Deserializer::ParseNormalization(GraphPtr graph, unsigned int layerIndex)
+{
+    CHECK_LAYERS(graph, 0, layerIndex);
+
+    auto normalizationDes = graph->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->descriptor();
+
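+    // A Normalization layer has exactly one input and one output slot.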
+    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
+    CHECK_VALID_SIZE(inputs.size(), 1);
+
+    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
+    CHECK_VALID_SIZE(outputs.size(), 1);
+
+    auto outputInfo = ToTensorInfo(outputs[0]);
+
+    auto normalizationDescriptor = GetNormalizationDescriptor(normalizationDes, layerIndex);
+    auto layerName = GetLayerName(graph, layerIndex);
+
+    IConnectableLayer* layer = m_Network->AddNormalizationLayer(normalizationDescriptor, layerName.c_str());
+    layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
+
+    RegisterInputSlots(graph, layerIndex, layer);
+    RegisterOutputSlots(graph, layerIndex, layer);
+}
+
 } // namespace armnnDeserializer
diff --git a/src/armnnDeserializer/Deserializer.hpp b/src/armnnDeserializer/Deserializer.hpp
index 1dd7ec5..fba8b88 100644
--- a/src/armnnDeserializer/Deserializer.hpp
+++ b/src/armnnDeserializer/Deserializer.hpp
@@ -19,6 +19,7 @@
     using GraphPtr = const armnnSerializer::SerializedGraph *;
     using TensorRawPtr = const armnnSerializer::TensorInfo *;
     using PoolingDescriptor = const armnnSerializer::Pooling2dDescriptor *;
+    using NormalizationDescriptorPtr = const armnnSerializer::NormalizationDescriptor *;
     using TensorRawPtrVector = std::vector<TensorRawPtr>;
     using LayerRawPtr = const armnnSerializer::LayerBase *;
     using LayerBaseRawPtr = const armnnSerializer::LayerBase *;
@@ -51,8 +52,10 @@
     static LayerBaseRawPtr GetBaseLayer(const GraphPtr& graphPtr, unsigned int layerIndex);
     static int32_t GetBindingLayerInfo(const GraphPtr& graphPtr, unsigned int layerIndex);
     static std::string GetLayerName(const GraphPtr& graph, unsigned int index);
-    armnn::Pooling2dDescriptor GetPoolingDescriptor(PoolingDescriptor pooling2dDescriptor,
-                                                    unsigned int layerIndex);
+    static armnn::Pooling2dDescriptor GetPoolingDescriptor(PoolingDescriptor pooling2dDescriptor,
+                                                           unsigned int layerIndex);
+    static armnn::NormalizationDescriptor GetNormalizationDescriptor(
+        NormalizationDescriptorPtr normalizationDescriptor, unsigned int layerIndex);
     static armnn::TensorInfo OutputShapeOfReshape(const armnn::TensorInfo & inputTensorInfo,
                                                   const std::vector<uint32_t> & targetDimsIn);
 
@@ -80,6 +83,7 @@
     void ParseMinimum(GraphPtr graph, unsigned int layerIndex);
     void ParseMaximum(GraphPtr graph, unsigned int layerIndex);
     void ParseMultiplication(GraphPtr graph, unsigned int layerIndex);
+    void ParseNormalization(GraphPtr graph, unsigned int layerIndex);
     void ParsePermute(GraphPtr graph, unsigned int layerIndex);
     void ParsePooling2d(GraphPtr graph, unsigned int layerIndex);
     void ParseReshape(GraphPtr graph, unsigned int layerIndex);
diff --git a/src/armnnDeserializer/DeserializerSupport.md b/src/armnnDeserializer/DeserializerSupport.md
index bb2f063..cf8f6de 100644
--- a/src/armnnDeserializer/DeserializerSupport.md
+++ b/src/armnnDeserializer/DeserializerSupport.md
@@ -18,6 +18,7 @@
 * Maximum
 * Minimum
 * Multiplication
+* Normalization
 * Permute
 * Pooling2d
 * Reshape
diff --git a/src/armnnDeserializer/test/DeserializeNormalization.cpp b/src/armnnDeserializer/test/DeserializeNormalization.cpp
new file mode 100644
index 0000000..eb7e958
--- /dev/null
+++ b/src/armnnDeserializer/test/DeserializeNormalization.cpp
@@ -0,0 +1,142 @@
+//
+// Copyright © 2017 Arm Ltd. All rights reserved.
+// SPDX-License-Identifier: MIT
+//
+
+#include <boost/test/unit_test.hpp>
+#include "ParserFlatbuffersSerializeFixture.hpp"
+#include "../Deserializer.hpp"
+
+#include <string>
+#include <iostream>
+
+BOOST_AUTO_TEST_SUITE(Deserializer)
+
+struct NormalizationFixture : public ParserFlatbuffersSerializeFixture
+{
+    explicit NormalizationFixture(const std::string& inputShape,
+                                  const std::string& outputShape,
+                                  const std::string& dataType,
+                                  const std::string& normAlgorithmChannel,
+                                  const std::string& normAlgorithmMethod,
+                                  const std::string& dataLayout)
+    {
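+        // SerializedGraph in FlatBuffers JSON form: InputLayer -> NormalizationLayer -> OutputLayer.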
+        m_JsonString = R"(
+        {
+            inputIds: [0],
+            outputIds: [2],
+            layers: [{
+                layer_type: "InputLayer",
+                layer: {
+                    base: {
+                        layerBindingId: 0,
+                        base: {
+                            index: 0,
+                            layerName: "InputLayer",
+                            layerType: "Input",
+                            inputSlots: [{
+                                index: 0,
+                                connection: {sourceLayerIndex:0, outputSlotIndex:0 },
+                                }],
+                            outputSlots: [{
+                                index: 0,
+                                tensorInfo: {
+                                    dimensions: )" + inputShape + R"(,
+                                    dataType: )" + dataType + R"(,
+                                    quantizationScale: 0.5,
+                                    quantizationOffset: 0
+                                    },
+                                }]
+                            },
+                        }
+                    },
+                },
+            {
+            layer_type: "NormalizationLayer",
+            layer : {
+                base: {
+                    index:1,
+                    layerName: "NormalizationLayer",
+                    layerType: "Normalization",
+                    inputSlots: [{
+                            index: 0,
+                            connection: {sourceLayerIndex:0, outputSlotIndex:0 },
+                        }],
+                    outputSlots: [{
+                        index: 0,
+                        tensorInfo: {
+                            dimensions: )" + outputShape + R"(,
+                            dataType: )" + dataType + R"(
+                        },
+                        }],
+                    },
+                descriptor: {
+                    normChannelType: )" + normAlgorithmChannel + R"(,
+                    normMethodType: )" + normAlgorithmMethod + R"(,
+                    normSize: 3,
+                    alpha: 1,
+                    beta: 1,
+                    k: 1,
+                    dataLayout: )" + dataLayout + R"(
+                    }
+                },
+            },
+            {
+            layer_type: "OutputLayer",
+            layer: {
+                base:{
+                    layerBindingId: 0,
+                    base: {
+                        index: 2,
+                        layerName: "OutputLayer",
+                        layerType: "Output",
+                        inputSlots: [{
+                            index: 0,
+                            connection: {sourceLayerIndex:1, outputSlotIndex:0 },
+                        }],
+                        outputSlots: [ {
+                            index: 0,
+                            tensorInfo: {
+                                dimensions: )" + outputShape + R"(,
+                                dataType: )" + dataType + R"(
+                            },
+                        }],
+                    }
+                }},
+            }]
+        }
+ )";
+        SetupSingleInputSingleOutput("InputLayer", "OutputLayer");
+    }
+};
+
+struct FloatNhwcLocalBrightnessAcrossNormalizationFixture : NormalizationFixture
+{
+    FloatNhwcLocalBrightnessAcrossNormalizationFixture() : NormalizationFixture("[ 2, 2, 2, 1 ]", "[ 2, 2, 2, 1 ]",
+        "Float32", "0", "0", "NHWC") {}
+};
+
+BOOST_FIXTURE_TEST_CASE(Float32NormalizationNhwcDataLayout, FloatNhwcLocalBrightnessAcrossNormalizationFixture)
+{
+    RunTest<4, armnn::DataType::Float32>(0, { 1.0f, 2.0f, 3.0f, 4.0f,
+                                              5.0f, 6.0f, 7.0f, 8.0f },
+                                            { 0.5f, 0.400000006f, 0.300000012f, 0.235294119f,
+                                              0.192307696f, 0.16216217f, 0.140000001f, 0.123076923f });
+}
+
+struct FloatNchwLocalBrightnessWithinNormalizationFixture : NormalizationFixture
+{
+    FloatNchwLocalBrightnessWithinNormalizationFixture() : NormalizationFixture("[ 2, 1, 2, 2 ]", "[ 2, 1, 2, 2 ]",
+        "Float32", "1", "0", "NCHW") {}
+};
+
+BOOST_FIXTURE_TEST_CASE(Float32NormalizationNchwDataLayout, FloatNchwLocalBrightnessWithinNormalizationFixture)
+{
+    RunTest<4, armnn::DataType::Float32>(0, { 1.0f, 2.0f, 3.0f, 4.0f,
+                                              5.0f, 6.0f, 7.0f, 8.0f },
+                                            { 0.0322581f, 0.0645161f, 0.0967742f, 0.1290323f,
+                                              0.0285714f, 0.0342857f, 0.04f, 0.0457143f });
+}
+
+BOOST_AUTO_TEST_SUITE_END()
\ No newline at end of file
diff --git a/src/armnnSerializer/ArmnnSchema.fbs b/src/armnnSerializer/ArmnnSchema.fbs
index b59adcf..cde0087 100644
--- a/src/armnnSerializer/ArmnnSchema.fbs
+++ b/src/armnnSerializer/ArmnnSchema.fbs
@@ -99,7 +99,8 @@
     Division = 15,
     Minimum = 16,
     Equal = 17,
-    Maximum = 18
+    Maximum = 18,
+    Normalization = 19
 }
 
 // Base layer table to be used as part of other layers
@@ -298,6 +299,32 @@
     dataLayout:DataLayout;
 }
 
+enum NormalizationAlgorithmChannel : byte {
+    Across = 0,
+    Within = 1
+}
+
+enum NormalizationAlgorithmMethod : byte {
+    LocalBrightness = 0,
+    LocalContrast = 1
+}
+
+table NormalizationLayer {
+    base:LayerBase;
+    descriptor:NormalizationDescriptor;
+}
+
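+// Serialized counterpart of armnn::NormalizationDescriptor.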
+table NormalizationDescriptor {
+    normChannelType:NormalizationAlgorithmChannel = Across;
+    normMethodType:NormalizationAlgorithmMethod = LocalBrightness;
+    normSize:uint;
+    alpha:float;
+    beta:float;
+    k:float;
+    dataLayout:DataLayout = NCHW;
+}
+
 union Layer {
     ActivationLayer,
     AdditionLayer,
@@ -317,7 +344,8 @@
     DivisionLayer,
     MinimumLayer,
     EqualLayer,
-    MaximumLayer
+    MaximumLayer,
+    NormalizationLayer
 }
 
 table AnyLayer {
diff --git a/src/armnnSerializer/Serializer.cpp b/src/armnnSerializer/Serializer.cpp
index a94a319..2000726 100644
--- a/src/armnnSerializer/Serializer.cpp
+++ b/src/armnnSerializer/Serializer.cpp
@@ -470,6 +470,30 @@
     CreateAnyLayer(flatBufferLayer.o, serializer::Layer::Layer_SpaceToBatchNdLayer);
 }
 
+void SerializerVisitor::VisitNormalizationLayer(const armnn::IConnectableLayer* layer,
+                                                const armnn::NormalizationDescriptor& descriptor,
+                                                const char* name)
+{
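+    // Build the FlatBuffers base layer and descriptor, then wrap them in a NormalizationLayer table.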
+    auto fbNormalizationBaseLayer  = CreateLayerBase(layer, serializer::LayerType::LayerType_Normalization);
+
+    auto fbNormalizationDescriptor = serializer::CreateNormalizationDescriptor(
+        m_flatBufferBuilder,
+        GetFlatBufferNormalizationAlgorithmChannel(descriptor.m_NormChannelType),
+        GetFlatBufferNormalizationAlgorithmMethod(descriptor.m_NormMethodType),
+        descriptor.m_NormSize,
+        descriptor.m_Alpha,
+        descriptor.m_Beta,
+        descriptor.m_K,
+        GetFlatBufferDataLayout(descriptor.m_DataLayout));
+
+    auto flatBufferLayer = serializer::CreateNormalizationLayer(m_flatBufferBuilder,
+                                                                fbNormalizationBaseLayer,
+                                                                fbNormalizationDescriptor);
+
+    CreateAnyLayer(flatBufferLayer.o, serializer::Layer::Layer_NormalizationLayer);
+}
+
 fb::Offset<serializer::LayerBase> SerializerVisitor::CreateLayerBase(const IConnectableLayer* layer,
                                                                      const serializer::LayerType layerType)
 {
diff --git a/src/armnnSerializer/Serializer.hpp b/src/armnnSerializer/Serializer.hpp
index 3d6f1b5..7e6097c 100644
--- a/src/armnnSerializer/Serializer.hpp
+++ b/src/armnnSerializer/Serializer.hpp
@@ -118,6 +118,9 @@
                                   const armnn::SpaceToBatchNdDescriptor& spaceToBatchNdDescriptor,
                                   const char* name = nullptr) override;
 
+    void VisitNormalizationLayer(const armnn::IConnectableLayer* layer,
+                                 const armnn::NormalizationDescriptor& normalizationDescriptor,
+                                 const char* name = nullptr) override;
 private:
 
     /// Creates the Input Slots and Output Slots and LayerBase for the layer.
diff --git a/src/armnnSerializer/SerializerSupport.md b/src/armnnSerializer/SerializerSupport.md
index 83987f9..d018a35 100644
--- a/src/armnnSerializer/SerializerSupport.md
+++ b/src/armnnSerializer/SerializerSupport.md
@@ -18,6 +18,7 @@
 * Maximum
 * Minimum
 * Multiplication
+* Normalization
 * Permute
 * Pooling2d
 * Reshape
diff --git a/src/armnnSerializer/SerializerUtils.cpp b/src/armnnSerializer/SerializerUtils.cpp
index 592f258..bfe795c 100644
--- a/src/armnnSerializer/SerializerUtils.cpp
+++ b/src/armnnSerializer/SerializerUtils.cpp
@@ -96,4 +96,32 @@
     }
 }
 
+armnnSerializer::NormalizationAlgorithmChannel GetFlatBufferNormalizationAlgorithmChannel(
+    armnn::NormalizationAlgorithmChannel normalizationAlgorithmChannel)
+{
+    switch (normalizationAlgorithmChannel)
+    {
+        case armnn::NormalizationAlgorithmChannel::Across:
+            return armnnSerializer::NormalizationAlgorithmChannel::NormalizationAlgorithmChannel_Across;
+        case armnn::NormalizationAlgorithmChannel::Within:
+            return armnnSerializer::NormalizationAlgorithmChannel::NormalizationAlgorithmChannel_Within;
+        default:
+            return armnnSerializer::NormalizationAlgorithmChannel::NormalizationAlgorithmChannel_Across;
+    }
+}
+
+armnnSerializer::NormalizationAlgorithmMethod GetFlatBufferNormalizationAlgorithmMethod(
+    armnn::NormalizationAlgorithmMethod normalizationAlgorithmMethod)
+{
+    switch (normalizationAlgorithmMethod)
+    {
+        case armnn::NormalizationAlgorithmMethod::LocalBrightness:
+            return armnnSerializer::NormalizationAlgorithmMethod::NormalizationAlgorithmMethod_LocalBrightness;
+        case armnn::NormalizationAlgorithmMethod::LocalContrast:
+            return armnnSerializer::NormalizationAlgorithmMethod::NormalizationAlgorithmMethod_LocalContrast;
+        default:
+            return armnnSerializer::NormalizationAlgorithmMethod::NormalizationAlgorithmMethod_LocalBrightness;
+    }
+}
+
 } // namespace armnnSerializer
\ No newline at end of file
diff --git a/src/armnnSerializer/SerializerUtils.hpp b/src/armnnSerializer/SerializerUtils.hpp
index 9b1dff9..29cda0d 100644
--- a/src/armnnSerializer/SerializerUtils.hpp
+++ b/src/armnnSerializer/SerializerUtils.hpp
@@ -24,4 +24,10 @@
 
 armnnSerializer::PaddingMethod GetFlatBufferPaddingMethod(armnn::PaddingMethod paddingMethod);
 
+armnnSerializer::NormalizationAlgorithmChannel GetFlatBufferNormalizationAlgorithmChannel(
+    armnn::NormalizationAlgorithmChannel normalizationAlgorithmChannel);
+
+armnnSerializer::NormalizationAlgorithmMethod GetFlatBufferNormalizationAlgorithmMethod(
+    armnn::NormalizationAlgorithmMethod normalizationAlgorithmMethod);
+
 } // namespace armnnSerializer
diff --git a/src/armnnSerializer/test/SerializerTests.cpp b/src/armnnSerializer/test/SerializerTests.cpp
index 7e4ff8c..271b3e7 100644
--- a/src/armnnSerializer/test/SerializerTests.cpp
+++ b/src/armnnSerializer/test/SerializerTests.cpp
@@ -891,6 +891,55 @@
     deserializedNetwork->Accept(nameChecker);
 }
 
+BOOST_AUTO_TEST_CASE(SerializeDeserializeNormalization)
+{
+    class VerifyNormalizationName : public armnn::LayerVisitorBase<armnn::VisitorNoThrowPolicy>
+    {
+    public:
+        void VisitNormalizationLayer(const armnn::IConnectableLayer*,
+                                     const armnn::NormalizationDescriptor& normalizationDescriptor,
+                                     const char* name) override
+        {
+            BOOST_TEST(name == "NormalizationLayer");
+        }
+    };
+
+    unsigned int inputShape[] = {2, 1, 2, 2};
+    unsigned int outputShape[] = {2, 1, 2, 2};
+
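+    // m_NormChannelType and m_NormMethodType are left at their defaults (Across, LocalBrightness).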
+    armnn::NormalizationDescriptor desc;
+    desc.m_DataLayout = armnn::DataLayout::NCHW;
+    desc.m_NormSize = 3;
+    desc.m_Alpha = 1;
+    desc.m_Beta = 1;
+    desc.m_K = 1;
+
+    auto inputTensorInfo = armnn::TensorInfo(4, inputShape, armnn::DataType::Float32);
+    auto outputTensorInfo = armnn::TensorInfo(4, outputShape, armnn::DataType::Float32);
+
+    armnn::INetworkPtr network = armnn::INetwork::Create();
+    armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
+    armnn::IConnectableLayer* const normalizationLayer = network->AddNormalizationLayer(desc, "NormalizationLayer");
+    armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
+
+    inputLayer->GetOutputSlot(0).Connect(normalizationLayer->GetInputSlot(0));
+    inputLayer->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
+    normalizationLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
+    normalizationLayer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
+
+    armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
+    BOOST_CHECK(deserializedNetwork);
+
+    VerifyNormalizationName nameChecker;
+    deserializedNetwork->Accept(nameChecker);
+
+    CheckDeserializedNetworkAgainstOriginal(*network,
+                                            *deserializedNetwork,
+                                            {inputTensorInfo.GetShape()},
+                                            {outputTensorInfo.GetShape()});
+}
+
 BOOST_AUTO_TEST_CASE(SerializeDeserializeEqual)
 {
     class VerifyEqualName : public armnn::LayerVisitorBase<armnn::VisitorNoThrowPolicy>
@@ -932,5 +981,4 @@
                                             {outputTensorInfo.GetShape()},
                                             {0, 1});
 }
-
 BOOST_AUTO_TEST_SUITE_END()