IVGCVSW-2693 Serialize/de-serialize L2Normalization

Signed-off-by: Narumol Prangnawarat <narumol.prangnawarat@arm.com>
Change-Id: I6a53ac576260383f32fb0d878b42d1251ffde94a
diff --git a/src/armnnDeserializer/Deserializer.cpp b/src/armnnDeserializer/Deserializer.cpp
index d62751d..719e47e 100644
--- a/src/armnnDeserializer/Deserializer.cpp
+++ b/src/armnnDeserializer/Deserializer.cpp
@@ -198,6 +198,7 @@
     m_ParserFunctions[Layer_FloorLayer]                  = &Deserializer::ParseFloor;
     m_ParserFunctions[Layer_GatherLayer]                 = &Deserializer::ParseGather;
     m_ParserFunctions[Layer_GreaterLayer]                = &Deserializer::ParseGreater;
+    m_ParserFunctions[Layer_L2NormalizationLayer]        = &Deserializer::ParseL2Normalization;
     m_ParserFunctions[Layer_MaximumLayer]                = &Deserializer::ParseMaximum;
     m_ParserFunctions[Layer_MeanLayer]                   = &Deserializer::ParseMean;
     m_ParserFunctions[Layer_MinimumLayer]                = &Deserializer::ParseMinimum;
@@ -250,6 +251,8 @@
             return graphPtr->layers()->Get(layerIndex)->layer_as_GreaterLayer()->base();
         case Layer::Layer_InputLayer:
             return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->base();
+        case Layer::Layer_L2NormalizationLayer:
+            return graphPtr->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer()->base();
         case Layer::Layer_MeanLayer:
             return graphPtr->layers()->Get(layerIndex)->layer_as_MeanLayer()->base();
         case Layer::Layer_MinimumLayer:
@@ -1074,6 +1077,31 @@
     RegisterOutputSlots(graph, layerIndex, layer);
 }
 
+void Deserializer::ParseL2Normalization(GraphPtr graph, unsigned int layerIndex)
+{
+    CHECK_LAYERS(graph, 0, layerIndex);
+
+    auto inputs = GetInputs(graph, layerIndex);
+    CHECK_VALID_SIZE(inputs.size(), 1);
+
+    auto outputs = GetOutputs(graph, layerIndex);
+    CHECK_VALID_SIZE(outputs.size(), 1);
+    auto outputInfo = ToTensorInfo(outputs[0]);
+
+    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer();
+    auto flatBufferDescriptor = flatBufferLayer->descriptor();
+
+    auto layerName = GetLayerName(graph, layerIndex);
+    armnn::L2NormalizationDescriptor descriptor;
+    descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
+
+    IConnectableLayer* layer = m_Network->AddL2NormalizationLayer(descriptor, layerName.c_str());
+    layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
+
+    RegisterInputSlots(graph, layerIndex, layer);
+    RegisterOutputSlots(graph, layerIndex, layer);
+}
+
 void Deserializer::ParseMinimum(GraphPtr graph, unsigned int layerIndex)
 {
     CHECK_LAYERS(graph, 0, layerIndex);
diff --git a/src/armnnDeserializer/Deserializer.hpp b/src/armnnDeserializer/Deserializer.hpp
index d085946..3006481 100644
--- a/src/armnnDeserializer/Deserializer.hpp
+++ b/src/armnnDeserializer/Deserializer.hpp
@@ -86,6 +86,7 @@
     void ParseFullyConnected(GraphPtr graph, unsigned int layerIndex);
     void ParseGather(GraphPtr graph, unsigned int layerIndex);
     void ParseGreater(GraphPtr graph, unsigned int layerIndex);
+    void ParseL2Normalization(GraphPtr graph, unsigned int layerIndex);
     void ParseMaximum(GraphPtr graph, unsigned int layerIndex);
     void ParseMean(GraphPtr graph, unsigned int layerIndex);
     void ParseMinimum(GraphPtr graph, unsigned int layerIndex);
diff --git a/src/armnnDeserializer/DeserializerSupport.md b/src/armnnDeserializer/DeserializerSupport.md
index 3762134..ceeae59 100644
--- a/src/armnnDeserializer/DeserializerSupport.md
+++ b/src/armnnDeserializer/DeserializerSupport.md
@@ -19,6 +19,7 @@
 * FullyConnected
 * Gather
 * Greater
+* L2Normalization
 * Maximum
 * Mean
 * Merger
diff --git a/src/armnnSerializer/ArmnnSchema.fbs b/src/armnnSerializer/ArmnnSchema.fbs
index a5fb4b6..36389b7 100644
--- a/src/armnnSerializer/ArmnnSchema.fbs
+++ b/src/armnnSerializer/ArmnnSchema.fbs
@@ -111,7 +111,8 @@
     StridedSlice = 27,
     Gather = 28,
     Mean = 29,
-    Merger = 30
+    Merger = 30,
+    L2Normalization = 31
 }
 
 // Base layer table to be used as part of other layers
@@ -203,6 +204,15 @@
     base:BindableLayerBase;
 }
 
+table L2NormalizationLayer {
+    base:LayerBase;
+    descriptor:L2NormalizationDescriptor;
+}
+
+table L2NormalizationDescriptor {
+    dataLayout:DataLayout = NCHW;
+}
+
 table MinimumLayer {
     base:LayerBase;
 }
@@ -463,7 +473,8 @@
     StridedSliceLayer,
     GatherLayer,
     MeanLayer,
-    MergerLayer
+    MergerLayer,
+    L2NormalizationLayer
 }
 
 table AnyLayer {
diff --git a/src/armnnSerializer/Serializer.cpp b/src/armnnSerializer/Serializer.cpp
index 3b71e5f..eaf19d5 100644
--- a/src/armnnSerializer/Serializer.cpp
+++ b/src/armnnSerializer/Serializer.cpp
@@ -329,6 +329,23 @@
     CreateAnyLayer(fbGreaterLayer.o, serializer::Layer::Layer_GreaterLayer);
 }
 
+void SerializerVisitor::VisitL2NormalizationLayer(const armnn::IConnectableLayer* layer,
+                                                  const armnn::L2NormalizationDescriptor& l2NormalizationDescriptor,
+                                                  const char* name)
+{
+    // Create FlatBuffer BaseLayer
+    auto fbBaseLayer = CreateLayerBase(layer, serializer::LayerType::LayerType_L2Normalization);
+
+    // Create the FlatBuffer L2Normalization Descriptor
+    auto fbDescriptor = serializer::CreateL2NormalizationDescriptor(
+            m_flatBufferBuilder, GetFlatBufferDataLayout(l2NormalizationDescriptor.m_DataLayout));
+
+    // Create FlatBuffer layer
+    auto fbLayer = serializer::CreateL2NormalizationLayer(m_flatBufferBuilder, fbBaseLayer, fbDescriptor);
+
+    CreateAnyLayer(fbLayer.o, serializer::Layer::Layer_L2NormalizationLayer);
+}
+
 void SerializerVisitor::VisitMaximumLayer(const armnn::IConnectableLayer* layer, const char* name)
 {
     auto fbMaximumBaseLayer = CreateLayerBase(layer, serializer::LayerType::LayerType_Maximum);
diff --git a/src/armnnSerializer/Serializer.hpp b/src/armnnSerializer/Serializer.hpp
index e93e4ce..c0e70c9 100644
--- a/src/armnnSerializer/Serializer.hpp
+++ b/src/armnnSerializer/Serializer.hpp
@@ -102,6 +102,10 @@
                          armnn::LayerBindingId id,
                          const char* name = nullptr) override;
 
+    void VisitL2NormalizationLayer(const armnn::IConnectableLayer* layer,
+                                   const armnn::L2NormalizationDescriptor& l2NormalizationDescriptor,
+                                   const char* name = nullptr) override;
+
     void VisitMeanLayer(const armnn::IConnectableLayer* layer,
                         const armnn::MeanDescriptor& descriptor,
                         const char* name) override;
diff --git a/src/armnnSerializer/SerializerSupport.md b/src/armnnSerializer/SerializerSupport.md
index ae8691e..d557756 100644
--- a/src/armnnSerializer/SerializerSupport.md
+++ b/src/armnnSerializer/SerializerSupport.md
@@ -19,6 +19,7 @@
 * FullyConnected
 * Gather
 * Greater
+* L2Normalization
 * Maximum
 * Mean
 * Merger
diff --git a/src/armnnSerializer/test/SerializerTests.cpp b/src/armnnSerializer/test/SerializerTests.cpp
index 5a054c2..069b9d6 100644
--- a/src/armnnSerializer/test/SerializerTests.cpp
+++ b/src/armnnSerializer/test/SerializerTests.cpp
@@ -422,6 +422,63 @@
                                                    {0, 1});
 }
 
+BOOST_AUTO_TEST_CASE(SerializeDeserializeL2Normalization)
+{
+    class VerifyL2NormalizationName : public armnn::LayerVisitorBase<armnn::VisitorNoThrowPolicy>
+    {
+    public:
+        explicit VerifyL2NormalizationName(const std::string& expectedL2NormalizationLayerName)
+        : m_ExpectedL2NormalizationLayerName(expectedL2NormalizationLayerName) {}
+
+        void VisitL2NormalizationLayer(const armnn::IConnectableLayer*,
+                                       const armnn::L2NormalizationDescriptor&,
+                                       const char* name) override
+        {
+            BOOST_TEST(name == m_ExpectedL2NormalizationLayerName.c_str());
+        }
+    private:
+        std::string m_ExpectedL2NormalizationLayerName;
+    };
+
+    const armnn::TensorInfo info({ 1, 2, 1, 5 }, armnn::DataType::Float32);
+
+    armnn::L2NormalizationDescriptor desc;
+    desc.m_DataLayout = armnn::DataLayout::NCHW;
+
+    armnn::INetworkPtr network = armnn::INetwork::Create();
+    armnn::IConnectableLayer* const inputLayer0 = network->AddInputLayer(0);
+
+    const char* l2NormLayerName = "l2Normalization";
+
+    armnn::IConnectableLayer* const l2NormLayer = network->AddL2NormalizationLayer(desc, l2NormLayerName);
+    inputLayer0->GetOutputSlot(0).Connect(l2NormLayer->GetInputSlot(0));
+
+    armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
+    l2NormLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
+
+    inputLayer0->GetOutputSlot(0).SetTensorInfo(info);
+    l2NormLayer->GetOutputSlot(0).SetTensorInfo(info);
+
+    armnnSerializer::Serializer serializer;
+    serializer.Serialize(*network);
+
+    std::stringstream stream;
+    serializer.SaveSerializedToStream(stream);
+    BOOST_TEST(stream.str().length() > 0);
+    BOOST_TEST(stream.str().find(l2NormLayerName) != stream.str().npos);
+
+    armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(stream.str());
+    BOOST_CHECK(deserializedNetwork);
+
+    VerifyL2NormalizationName nameChecker(l2NormLayerName);
+    deserializedNetwork->Accept(nameChecker);
+
+    CheckDeserializedNetworkAgainstOriginal<float>(*network,
+                                                   *deserializedNetwork,
+                                                   { info.GetShape() },
+                                                   { info.GetShape() });
+}
+
 BOOST_AUTO_TEST_CASE(SerializeDeserializeMultiplication)
 {
     class VerifyMultiplicationName : public armnn::LayerVisitorBase<armnn::VisitorNoThrowPolicy>