IVGSVSW-2736 Rename DeserializeParser => Deserializer & fix namespaces

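The FlatBuffers schema namespace is reduced from armnn.armnnSerializer
to plain armnnSerializer, so the generated serializer types no longer
nest inside the armnn namespace. Serializer.cpp/.hpp and the
SerializerUtils files are updated to drop the armnn:: prefix from the
generated types and to qualify armnn core types (IConnectableLayer,
ConstTensor, DataType, ...) explicitly. The serializer tests now use
the renamed armnnDeserializer::IDeserializer interface instead of
armnnDeserializeParser::IDeserializeParser; a minimal sketch of the
updated call pattern, following SerializerTests.cpp and assuming
`data` is a std::vector<std::uint8_t> holding the serialized network:

    armnn::INetworkPtr network =
        armnnDeserializer::IDeserializer::Create()->CreateNetworkFromBinary(data);
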
Change-Id: I4166c0bbb5ba7f8b8884e71134c21f43d1fc27b0
Signed-off-by: Derek Lamberti <derek.lamberti@arm.com>
diff --git a/src/armnnSerializer/Schema.fbs b/src/armnnSerializer/Schema.fbs
index 6c542b1..1b7427b 100644
--- a/src/armnnSerializer/Schema.fbs
+++ b/src/armnnSerializer/Schema.fbs
@@ -3,7 +3,7 @@
 // SPDX-License-Identifier: MIT
 //
 
-namespace armnn.armnnSerializer;
+namespace armnnSerializer;
 
 file_identifier "ARMN";
 
diff --git a/src/armnnSerializer/Serializer.cpp b/src/armnnSerializer/Serializer.cpp
index 27204a0..b85c45a 100644
--- a/src/armnnSerializer/Serializer.cpp
+++ b/src/armnnSerializer/Serializer.cpp
@@ -17,7 +17,7 @@
 
 using namespace armnn;
 namespace fb = flatbuffers;
-namespace serializer = armnn::armnnSerializer;
+namespace serializer = armnnSerializer;
 
 namespace armnnSerializer
 {
@@ -40,7 +40,7 @@
 }
 
 // Build FlatBuffer for Input Layer
-void SerializerVisitor::VisitInputLayer(const IConnectableLayer* layer, LayerBindingId id, const char* name)
+void SerializerVisitor::VisitInputLayer(const armnn::IConnectableLayer* layer, LayerBindingId id, const char* name)
 {
     // Create FlatBuffer BaseLayer
     auto flatBufferInputBaseLayer = CreateLayerBase(layer, serializer::LayerType::LayerType_Input);
@@ -60,7 +60,7 @@
 }
 
 // Build FlatBuffer for Output Layer
-void SerializerVisitor::VisitOutputLayer(const IConnectableLayer* layer, LayerBindingId id, const char* name)
+void SerializerVisitor::VisitOutputLayer(const armnn::IConnectableLayer* layer, LayerBindingId id, const char* name)
 {
     // Create FlatBuffer BaseLayer
     auto flatBufferOutputBaseLayer = CreateLayerBase(layer, serializer::LayerType::LayerType_Output);
@@ -79,7 +79,7 @@
 }
 
 // Build FlatBuffer for Addition Layer
-void SerializerVisitor::VisitAdditionLayer(const IConnectableLayer* layer, const char* name)
+void SerializerVisitor::VisitAdditionLayer(const armnn::IConnectableLayer* layer, const char* name)
 {
     // Create FlatBuffer BaseLayer
     auto flatBufferAdditionBaseLayer = CreateLayerBase(layer, serializer::LayerType::LayerType_Addition);
@@ -92,10 +92,10 @@
 }
 
 // Build FlatBuffer for Convolution2dLayer
-void SerializerVisitor::VisitConvolution2dLayer(const IConnectableLayer* layer,
-                                                const Convolution2dDescriptor& descriptor,
-                                                const ConstTensor& weights,
-                                                const Optional<ConstTensor>& biases,
+void SerializerVisitor::VisitConvolution2dLayer(const armnn::IConnectableLayer* layer,
+                                                const armnn::Convolution2dDescriptor& descriptor,
+                                                const armnn::ConstTensor& weights,
+                                                const armnn::Optional<armnn::ConstTensor>& biases,
                                                 const char* name)
 {
     // Create FlatBuffer BaseLayer
@@ -129,10 +129,10 @@
     CreateAnyLayer(flatBufferLayer.o, serializer::Layer::Layer_Convolution2dLayer);
 }
 
-void SerializerVisitor::VisitDepthwiseConvolution2dLayer(const IConnectableLayer* layer,
-                                                         const DepthwiseConvolution2dDescriptor& descriptor,
-                                                         const ConstTensor& weights,
-                                                         const Optional<ConstTensor>& biases,
+void SerializerVisitor::VisitDepthwiseConvolution2dLayer(const armnn::IConnectableLayer* layer,
+                                                         const armnn::DepthwiseConvolution2dDescriptor& descriptor,
+                                                         const armnn::ConstTensor& weights,
+                                                         const armnn::Optional<armnn::ConstTensor>& biases,
                                                          const char* name)
 {
     auto fbBaseLayer  = CreateLayerBase(layer, serializer::LayerType::LayerType_DepthwiseConvolution2d);
@@ -163,7 +163,7 @@
 }
 
 // Build FlatBuffer for Multiplication Layer
-void SerializerVisitor::VisitMultiplicationLayer(const IConnectableLayer* layer, const char* name)
+void SerializerVisitor::VisitMultiplicationLayer(const armnn::IConnectableLayer* layer, const char* name)
 {
     // Create FlatBuffer BaseLayer
     auto flatBufferMultiplicationBaseLayer = CreateLayerBase(layer, serializer::LayerType::LayerType_Multiplication);
@@ -177,7 +177,7 @@
 }
 
 // Build FlatBuffer for Reshape Layer
-void SerializerVisitor::VisitReshapeLayer(const IConnectableLayer* layer,
+void SerializerVisitor::VisitReshapeLayer(const armnn::IConnectableLayer* layer,
                                           const armnn::ReshapeDescriptor& reshapeDescriptor,
                                           const char* name)
 {
@@ -202,8 +202,8 @@
 }
 
 // Build FlatBuffer for Softmax Layer
-void SerializerVisitor::VisitSoftmaxLayer(const IConnectableLayer* layer,
-                                          const SoftmaxDescriptor& softmaxDescriptor,
+void SerializerVisitor::VisitSoftmaxLayer(const armnn::IConnectableLayer* layer,
+                                          const armnn::SoftmaxDescriptor& softmaxDescriptor,
                                           const char* name)
 {
     // Create FlatBuffer BaseLayer
@@ -222,8 +222,8 @@
     CreateAnyLayer(flatBufferSoftmaxLayer.o, serializer::Layer::Layer_SoftmaxLayer);
 }
 
-void SerializerVisitor::VisitPooling2dLayer(const IConnectableLayer* layer,
-                                            const Pooling2dDescriptor& pooling2dDescriptor,
+void SerializerVisitor::VisitPooling2dLayer(const armnn::IConnectableLayer* layer,
+                                            const armnn::Pooling2dDescriptor& pooling2dDescriptor,
                                             const char* name)
 {
     auto fbPooling2dBaseLayer  = CreateLayerBase(layer, serializer::LayerType::LayerType_Pooling2d);
@@ -249,7 +249,7 @@
     CreateAnyLayer(fbPooling2dLayer.o, serializer::Layer::Layer_Pooling2dLayer);
 }
 
-fb::Offset<serializer::LayerBase> SerializerVisitor::CreateLayerBase(const IConnectableLayer* layer,
+fb::Offset<serializer::LayerBase> SerializerVisitor::CreateLayerBase(const armnn::IConnectableLayer* layer,
                                                                      const serializer::LayerType layerType)
 {
     std::vector<fb::Offset<serializer::InputSlot>> inputSlots = CreateInputSlots(layer);
@@ -265,9 +265,7 @@
 
 void SerializerVisitor::CreateAnyLayer(const flatbuffers::Offset<void>& layer, const serializer::Layer serializerLayer)
 {
-    auto anyLayer = armnn::armnnSerializer::CreateAnyLayer(m_flatBufferBuilder,
-                                                           serializerLayer,
-                                                           layer);
+    auto anyLayer = armnnSerializer::CreateAnyLayer(m_flatBufferBuilder, serializerLayer, layer);
     m_serializedLayers.push_back(anyLayer);
 }
 
@@ -280,9 +278,10 @@
     return fbVector;
 }
 
-flatbuffers::Offset<serializer::ConstTensor> SerializerVisitor::CreateConstTensorInfo(const ConstTensor& constTensor)
+flatbuffers::Offset<serializer::ConstTensor>
+    SerializerVisitor::CreateConstTensorInfo(const armnn::ConstTensor& constTensor)
 {
-    TensorInfo tensorInfo = constTensor.GetInfo();
+    armnn::TensorInfo tensorInfo = constTensor.GetInfo();
 
     // Get the dimensions
     std::vector<unsigned int> shape;
@@ -302,8 +301,8 @@
 
     switch (tensorInfo.GetDataType())
     {
-        case DataType::Float32:
-        case DataType::Signed32:
+        case armnn::DataType::Float32:
+        case armnn::DataType::Signed32:
         {
             auto fbVector = CreateDataVector<int32_t>(constTensor.GetMemoryArea(), constTensor.GetNumBytes());
             flatbuffers::Offset<serializer::IntData> flatBuffersData = serializer::CreateIntData(
@@ -312,7 +311,7 @@
             fbPayload = flatBuffersData.o;
             break;
         }
-        case DataType::Float16:
+        case armnn::DataType::Float16:
         {
             auto fbVector = CreateDataVector<int16_t>(constTensor.GetMemoryArea(), constTensor.GetNumBytes());
             flatbuffers::Offset<serializer::ShortData> flatBuffersData = serializer::CreateShortData(
@@ -321,8 +320,8 @@
             fbPayload = flatBuffersData.o;
             break;
         }
-        case DataType::QuantisedAsymm8:
-        case DataType::Boolean:
+        case armnn::DataType::QuantisedAsymm8:
+        case armnn::DataType::Boolean:
         default:
         {
             auto fbVector = CreateDataVector<int8_t>(constTensor.GetMemoryArea(), constTensor.GetNumBytes());
@@ -340,7 +339,8 @@
     return flatBufferConstTensor;
 }
 
-std::vector<fb::Offset<serializer::InputSlot>> SerializerVisitor::CreateInputSlots(const IConnectableLayer* layer)
+std::vector<fb::Offset<serializer::InputSlot>>
+    SerializerVisitor::CreateInputSlots(const armnn::IConnectableLayer* layer)
 {
     std::vector<fb::Offset<serializer::InputSlot>> inputSlots;
 
@@ -361,7 +361,8 @@
     return inputSlots;
 }
 
-std::vector<fb::Offset<serializer::OutputSlot>> SerializerVisitor::CreateOutputSlots(const IConnectableLayer* layer)
+std::vector<fb::Offset<serializer::OutputSlot>>
+    SerializerVisitor::CreateOutputSlots(const armnn::IConnectableLayer* layer)
 {
     std::vector<fb::Offset<serializer::OutputSlot>> outputSlots;
 
@@ -369,7 +370,7 @@
     for (unsigned int slotIndex = 0; slotIndex < layer->GetNumOutputSlots(); ++slotIndex)
     {
         const IOutputSlot& outputSlot = layer->GetOutputSlot(slotIndex);
-        const TensorInfo& tensorInfo = outputSlot.GetTensorInfo();
+        const armnn::TensorInfo& tensorInfo = outputSlot.GetTensorInfo();
 
         // Get the dimensions
         std::vector<unsigned int> shape;
diff --git a/src/armnnSerializer/Serializer.hpp b/src/armnnSerializer/Serializer.hpp
index 907d4ed..aa765a2 100644
--- a/src/armnnSerializer/Serializer.hpp
+++ b/src/armnnSerializer/Serializer.hpp
@@ -37,7 +37,7 @@
         return m_outputIds;
     }
 
-    std::vector<flatbuffers::Offset<armnn::armnnSerializer::AnyLayer>>& GetSerializedLayers()
+    std::vector<flatbuffers::Offset<armnnSerializer::AnyLayer>>& GetSerializedLayers()
     {
         return m_serializedLayers;
     }
@@ -83,15 +83,15 @@
 private:
 
     /// Creates the Input Slots and Output Slots and LayerBase for the layer.
-    flatbuffers::Offset<armnn::armnnSerializer::LayerBase> CreateLayerBase(
+    flatbuffers::Offset<armnnSerializer::LayerBase> CreateLayerBase(
             const armnn::IConnectableLayer* layer,
-            const armnn::armnnSerializer::LayerType layerType);
+            const armnnSerializer::LayerType layerType);
 
     /// Creates the serializer AnyLayer for the layer and adds it to m_serializedLayers.
-    void CreateAnyLayer(const flatbuffers::Offset<void>& layer, const armnn::armnnSerializer::Layer serializerLayer);
+    void CreateAnyLayer(const flatbuffers::Offset<void>& layer, const armnnSerializer::Layer serializerLayer);
 
     /// Creates the serializer ConstTensor for the armnn ConstTensor.
-    flatbuffers::Offset<armnn::armnnSerializer::ConstTensor> CreateConstTensorInfo(
+    flatbuffers::Offset<armnnSerializer::ConstTensor> CreateConstTensorInfo(
             const armnn::ConstTensor& constTensor);
 
     template <typename T>
@@ -101,18 +101,18 @@
     uint32_t GetSerializedId(unsigned int guid);
 
     /// Creates the serializer InputSlots for the layer.
-    std::vector<flatbuffers::Offset<armnn::armnnSerializer::InputSlot>> CreateInputSlots(
+    std::vector<flatbuffers::Offset<armnnSerializer::InputSlot>> CreateInputSlots(
             const armnn::IConnectableLayer* layer);
 
     /// Creates the serializer OutputSlots for the layer.
-    std::vector<flatbuffers::Offset<armnn::armnnSerializer::OutputSlot>> CreateOutputSlots(
+    std::vector<flatbuffers::Offset<armnnSerializer::OutputSlot>> CreateOutputSlots(
             const armnn::IConnectableLayer* layer);
 
     /// FlatBufferBuilder to create our layers' FlatBuffers.
     flatbuffers::FlatBufferBuilder m_flatBufferBuilder;
 
     /// AnyLayers required by the SerializedGraph.
-    std::vector<flatbuffers::Offset<armnn::armnnSerializer::AnyLayer>> m_serializedLayers;
+    std::vector<flatbuffers::Offset<armnnSerializer::AnyLayer>> m_serializedLayers;
 
     /// Guids of all Input Layers required by the SerializedGraph.
     std::vector<unsigned int> m_inputIds;
diff --git a/src/armnnSerializer/SerializerUtils.cpp b/src/armnnSerializer/SerializerUtils.cpp
index 2bad85e..592f258 100644
--- a/src/armnnSerializer/SerializerUtils.cpp
+++ b/src/armnnSerializer/SerializerUtils.cpp
@@ -9,91 +9,90 @@
 {
 
 using namespace armnn;
-namespace serializer = armnn::armnnSerializer;
 
-serializer::ConstTensorData GetFlatBufferConstTensorData(DataType dataType)
+armnnSerializer::ConstTensorData GetFlatBufferConstTensorData(armnn::DataType dataType)
 {
     switch (dataType)
     {
-        case DataType::Float32:
-        case DataType::Signed32:
-            return serializer::ConstTensorData::ConstTensorData_IntData;
-        case DataType::Float16:
-            return serializer::ConstTensorData::ConstTensorData_ShortData;
-        case DataType::QuantisedAsymm8:
-        case DataType::Boolean:
-            return serializer::ConstTensorData::ConstTensorData_ByteData;
+        case armnn::DataType::Float32:
+        case armnn::DataType::Signed32:
+            return armnnSerializer::ConstTensorData::ConstTensorData_IntData;
+        case armnn::DataType::Float16:
+            return armnnSerializer::ConstTensorData::ConstTensorData_ShortData;
+        case armnn::DataType::QuantisedAsymm8:
+        case armnn::DataType::Boolean:
+            return armnnSerializer::ConstTensorData::ConstTensorData_ByteData;
         default:
-            return serializer::ConstTensorData::ConstTensorData_NONE;
+            return armnnSerializer::ConstTensorData::ConstTensorData_NONE;
     }
 }
 
-serializer::DataType GetFlatBufferDataType(DataType dataType)
+armnnSerializer::DataType GetFlatBufferDataType(armnn::DataType dataType)
 {
     switch (dataType)
     {
-        case DataType::Float32:
-            return serializer::DataType::DataType_Float32;
-        case DataType::Float16:
-            return serializer::DataType::DataType_Float16;
-        case DataType::Signed32:
-            return serializer::DataType::DataType_Signed32;
-        case DataType::QuantisedAsymm8:
-            return serializer::DataType::DataType_QuantisedAsymm8;
-        case DataType::Boolean:
-            return serializer::DataType::DataType_Boolean;
+        case armnn::DataType::Float32:
+            return armnnSerializer::DataType::DataType_Float32;
+        case armnn::DataType::Float16:
+            return armnnSerializer::DataType::DataType_Float16;
+        case armnn::DataType::Signed32:
+            return armnnSerializer::DataType::DataType_Signed32;
+        case armnn::DataType::QuantisedAsymm8:
+            return armnnSerializer::DataType::DataType_QuantisedAsymm8;
+        case armnn::DataType::Boolean:
+            return armnnSerializer::DataType::DataType_Boolean;
         default:
-            return serializer::DataType::DataType_Float16;
+            return armnnSerializer::DataType::DataType_Float16;
     }
 }
 
-serializer::DataLayout GetFlatBufferDataLayout(DataLayout dataLayout)
+armnnSerializer::DataLayout GetFlatBufferDataLayout(armnn::DataLayout dataLayout)
 {
     switch (dataLayout)
     {
-        case DataLayout::NHWC:
-            return serializer::DataLayout::DataLayout_NHWC;
-        case DataLayout::NCHW:
+        case armnn::DataLayout::NHWC:
+            return armnnSerializer::DataLayout::DataLayout_NHWC;
+        case armnn::DataLayout::NCHW:
         default:
-            return serializer::DataLayout::DataLayout_NCHW;
+            return armnnSerializer::DataLayout::DataLayout_NCHW;
     }
 }
 
-serializer::PoolingAlgorithm GetFlatBufferPoolingAlgorithm(PoolingAlgorithm poolingAlgorithm)
+armnnSerializer::PoolingAlgorithm GetFlatBufferPoolingAlgorithm(armnn::PoolingAlgorithm poolingAlgorithm)
 {
     switch (poolingAlgorithm)
     {
-        case PoolingAlgorithm::Average:
-            return serializer::PoolingAlgorithm::PoolingAlgorithm_Average;
-        case PoolingAlgorithm::L2:
-            return serializer::PoolingAlgorithm::PoolingAlgorithm_L2;
-        case PoolingAlgorithm::Max:
+        case armnn::PoolingAlgorithm::Average:
+            return armnnSerializer::PoolingAlgorithm::PoolingAlgorithm_Average;
+        case armnn::PoolingAlgorithm::L2:
+            return armnnSerializer::PoolingAlgorithm::PoolingAlgorithm_L2;
+        case armnn::PoolingAlgorithm::Max:
         default:
-            return serializer::PoolingAlgorithm::PoolingAlgorithm_Max;
+            return armnnSerializer::PoolingAlgorithm::PoolingAlgorithm_Max;
     }
 }
 
-serializer::OutputShapeRounding GetFlatBufferOutputShapeRounding(OutputShapeRounding outputShapeRounding)
+armnnSerializer::OutputShapeRounding GetFlatBufferOutputShapeRounding(armnn::OutputShapeRounding outputShapeRounding)
 {
     switch (outputShapeRounding)
     {
-        case OutputShapeRounding::Ceiling:
-            return serializer::OutputShapeRounding::OutputShapeRounding_Ceiling;
-        case OutputShapeRounding::Floor:
+        case armnn::OutputShapeRounding::Ceiling:
+            return armnnSerializer::OutputShapeRounding::OutputShapeRounding_Ceiling;
+        case armnn::OutputShapeRounding::Floor:
         default:
-            return serializer::OutputShapeRounding::OutputShapeRounding_Floor;
+            return armnnSerializer::OutputShapeRounding::OutputShapeRounding_Floor;
     }
 }
 
-serializer::PaddingMethod GetFlatBufferPaddingMethod(PaddingMethod paddingMethod)
+armnnSerializer::PaddingMethod GetFlatBufferPaddingMethod(armnn::PaddingMethod paddingMethod)
 {
     switch (paddingMethod)
     {
-        case PaddingMethod::IgnoreValue:
-            return serializer::PaddingMethod::PaddingMethod_IgnoreValue;
-        case PaddingMethod::Exclude:
+        case armnn::PaddingMethod::IgnoreValue:
+            return armnnSerializer::PaddingMethod::PaddingMethod_IgnoreValue;
+        case armnn::PaddingMethod::Exclude:
         default:
-            return serializer::PaddingMethod::PaddingMethod_Exclude;
+            return armnnSerializer::PaddingMethod::PaddingMethod_Exclude;
     }
 }
 
diff --git a/src/armnnSerializer/SerializerUtils.hpp b/src/armnnSerializer/SerializerUtils.hpp
index 06f3076..71eb01b 100644
--- a/src/armnnSerializer/SerializerUtils.hpp
+++ b/src/armnnSerializer/SerializerUtils.hpp
@@ -11,17 +11,17 @@
 namespace armnnSerializer
 {
 
-armnn::armnnSerializer::ConstTensorData GetFlatBufferConstTensorData(armnn::DataType dataType);
+armnnSerializer::ConstTensorData GetFlatBufferConstTensorData(armnn::DataType dataType);
 
-armnn::armnnSerializer::DataType GetFlatBufferDataType(armnn::DataType dataType);
+armnnSerializer::DataType GetFlatBufferDataType(armnn::DataType dataType);
 
-armnn::armnnSerializer::DataLayout GetFlatBufferDataLayout(armnn::DataLayout dataLayout);
+armnnSerializer::DataLayout GetFlatBufferDataLayout(armnn::DataLayout dataLayout);
 
-armnn::armnnSerializer::PoolingAlgorithm GetFlatBufferPoolingAlgorithm(armnn::PoolingAlgorithm poolingAlgorithm);
+armnnSerializer::PoolingAlgorithm GetFlatBufferPoolingAlgorithm(armnn::PoolingAlgorithm poolingAlgorithm);
 
-armnn::armnnSerializer::OutputShapeRounding GetFlatBufferOutputShapeRounding(
+armnnSerializer::OutputShapeRounding GetFlatBufferOutputShapeRounding(
     armnn::OutputShapeRounding outputShapeRounding);
 
-armnn::armnnSerializer::PaddingMethod GetFlatBufferPaddingMethod(armnn::PaddingMethod paddingMethod);
+armnnSerializer::PaddingMethod GetFlatBufferPaddingMethod(armnn::PaddingMethod paddingMethod);
 
 } // namespace armnnSerializer
\ No newline at end of file
diff --git a/src/armnnSerializer/test/SerializerTests.cpp b/src/armnnSerializer/test/SerializerTests.cpp
index a88193d..7dad6ac 100644
--- a/src/armnnSerializer/test/SerializerTests.cpp
+++ b/src/armnnSerializer/test/SerializerTests.cpp
@@ -8,7 +8,7 @@
 
 #include "../Serializer.hpp"
 
-#include <armnnDeserializeParser/IDeserializeParser.hpp>
+#include <armnnDeserializer/IDeserializer.hpp>
 
 #include <random>
 #include <sstream>
@@ -17,7 +17,7 @@
 #include <boost/test/unit_test.hpp>
 #include <flatbuffers/idl.h>
 
-using armnnDeserializeParser::IDeserializeParser;
+using armnnDeserializer::IDeserializer;
 
 namespace
 {
@@ -25,7 +25,7 @@
 armnn::INetworkPtr DeserializeNetwork(const std::string& serializerString)
 {
     std::vector<std::uint8_t> const serializerVector{serializerString.begin(), serializerString.end()};
-    return armnnDeserializeParser::IDeserializeParser::Create()->CreateNetworkFromBinary(serializerVector);
+    return IDeserializer::Create()->CreateNetworkFromBinary(serializerVector);
 }
 
 std::string SerializeNetwork(const armnn::INetwork& network)