IVGCVSW-2870 Serialize quantize layer

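For reference, the new serialization path can be exercised end to end
through the public ISerializer API, mirroring the added unit test. A
minimal sketch (the output file name is illustrative; it assumes the
existing armnnSerializer::ISerializer interface):

    #include <armnn/ArmNN.hpp>
    #include <armnnSerializer/ISerializer.hpp>

    #include <fstream>

    int main()
    {
        // Input -> Quantize -> Output, tensor infos as in the new SerializeQuantize test.
        armnn::INetworkPtr network = armnn::INetwork::Create();
        armnn::IConnectableLayer* input    = network->AddInputLayer(0);
        armnn::IConnectableLayer* quantize = network->AddQuantizeLayer("quantize");
        armnn::IConnectableLayer* output   = network->AddOutputLayer(0);

        input->GetOutputSlot(0).Connect(quantize->GetInputSlot(0));
        quantize->GetOutputSlot(0).Connect(output->GetInputSlot(0));

        const armnn::TensorInfo info({ 1, 2, 2, 3 }, armnn::DataType::Float32);
        input->GetOutputSlot(0).SetTensorInfo(info);
        quantize->GetOutputSlot(0).SetTensorInfo(info);

        // Serialize the network to a FlatBuffer and write it to disk.
        auto serializer = armnnSerializer::ISerializer::Create();
        serializer->Serialize(*network);

        std::ofstream file("quantized_network.armnn", std::ios::binary);
        serializer->SaveSerializedToStream(file);
        return 0;
    }
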
Change-Id: I2cff85a3cb4d47aa09227a6810812a142f2aedd3
Signed-off-by: Derek Lamberti <derek.lamberti@arm.com>
diff --git a/src/armnnSerializer/ArmnnSchema.fbs b/src/armnnSerializer/ArmnnSchema.fbs
index 2cceaae..7ac8359 100644
--- a/src/armnnSerializer/ArmnnSchema.fbs
+++ b/src/armnnSerializer/ArmnnSchema.fbs
@@ -116,7 +116,8 @@
     L2Normalization = 31,
     Splitter = 32,
     DetectionPostProcess = 33,
-    Lstm = 34
+    Lstm = 34,
+    Quantize = 35
 }
 
 // Base layer table to be used as part of other layers
@@ -265,6 +266,10 @@
     dataLayout:DataLayout;
 }
 
+table QuantizeLayer {
+    base:LayerBase;
+}
+
 table SoftmaxLayer {
     base:LayerBase;
     descriptor:SoftmaxDescriptor;
@@ -549,7 +554,8 @@
     L2NormalizationLayer,
     SplitterLayer,
     DetectionPostProcessLayer,
-    LstmLayer
+    LstmLayer,
+    QuantizeLayer
 }
 
 table AnyLayer {
diff --git a/src/armnnSerializer/Serializer.cpp b/src/armnnSerializer/Serializer.cpp
index 2fd8402..83777c9 100644
--- a/src/armnnSerializer/Serializer.cpp
+++ b/src/armnnSerializer/Serializer.cpp
@@ -681,6 +681,15 @@
     CreateAnyLayer(fbPooling2dLayer.o, serializer::Layer::Layer_Pooling2dLayer);
 }
 
+// Build FlatBuffer for Quantize Layer
+void SerializerVisitor::VisitQuantizeLayer(const armnn::IConnectableLayer* layer, const char* name)
+{
+    auto fbQuantizeBaseLayer = CreateLayerBase(layer, serializer::LayerType::LayerType_Quantize);
+    auto fbQuantizeLayer = serializer::CreateQuantizeLayer(m_flatBufferBuilder,
+                                                           fbQuantizeBaseLayer);
+    CreateAnyLayer(fbQuantizeLayer.o, serializer::Layer::Layer_QuantizeLayer);
+}
+
 // Build FlatBuffer for FullyConnected Layer
 void SerializerVisitor::VisitFullyConnectedLayer(const armnn::IConnectableLayer* layer,
                                                  const armnn::FullyConnectedDescriptor& fullyConnectedDescriptor,
diff --git a/src/armnnSerializer/Serializer.hpp b/src/armnnSerializer/Serializer.hpp
index c1a1305..82e1931 100644
--- a/src/armnnSerializer/Serializer.hpp
+++ b/src/armnnSerializer/Serializer.hpp
@@ -149,6 +149,9 @@
                              const armnn::Pooling2dDescriptor& pooling2dDescriptor,
                              const char* name = nullptr) override;
 
+    void VisitQuantizeLayer(const armnn::IConnectableLayer* layer,
+                            const char* name = nullptr) override;
+
     void VisitReshapeLayer(const armnn::IConnectableLayer* layer,
                            const armnn::ReshapeDescriptor& reshapeDescriptor,
                            const char* name = nullptr) override;
diff --git a/src/armnnSerializer/test/SerializerTests.cpp b/src/armnnSerializer/test/SerializerTests.cpp
index e3ce6d2..0345e53 100644
--- a/src/armnnSerializer/test/SerializerTests.cpp
+++ b/src/armnnSerializer/test/SerializerTests.cpp
@@ -1542,6 +1542,43 @@
     deserializedNetwork->Accept(verifier);
 }
 
+BOOST_AUTO_TEST_CASE(SerializeQuantize)
+{
+    class QuantizeLayerVerifier : public LayerVerifierBase
+    {
+    public:
+        QuantizeLayerVerifier(const std::string& layerName,
+                              const std::vector<armnn::TensorInfo>& inputInfos,
+                              const std::vector<armnn::TensorInfo>& outputInfos)
+            : LayerVerifierBase(layerName, inputInfos, outputInfos) {}
+
+        void VisitQuantizeLayer(const armnn::IConnectableLayer* layer, const char* name) override
+        {
+            VerifyNameAndConnections(layer, name);
+        }
+    };
+
+    const std::string layerName("quantize");
+    const armnn::TensorInfo info({ 1, 2, 2, 3 }, armnn::DataType::Float32);
+
+    armnn::INetworkPtr network = armnn::INetwork::Create();
+    armnn::IConnectableLayer* const inputLayer = network->AddInputLayer(0);
+    armnn::IConnectableLayer* const quantizeLayer = network->AddQuantizeLayer(layerName.c_str());
+    armnn::IConnectableLayer* const outputLayer = network->AddOutputLayer(0);
+
+    inputLayer->GetOutputSlot(0).Connect(quantizeLayer->GetInputSlot(0));
+    quantizeLayer->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
+
+    inputLayer->GetOutputSlot(0).SetTensorInfo(info);
+    quantizeLayer->GetOutputSlot(0).SetTensorInfo(info);
+
+    armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
+    BOOST_CHECK(deserializedNetwork);
+
+    QuantizeLayerVerifier verifier(layerName, {info}, {info});
+    deserializedNetwork->Accept(verifier);
+}
+
 BOOST_AUTO_TEST_CASE(SerializeReshape)
 {
     class ReshapeLayerVerifier : public LayerVerifierBase