IVGCVSW-3039 Unify BindingPointInfo declarations

InferenceModel.hpp carried a local
std::pair<armnn::LayerBindingId, armnn::TensorInfo> alias that was only
needed while the armnnCaffeParser, armnnTfParser and armnnTfLiteParser
each declared their own BindingPointInfo type. Now that the common
armnn::BindingPointInfo declaration exists, use it directly throughout
and drop the redundant per-class aliases.

Change-Id: I3deb2b9a37e8a8f8f2ed93c64ed0656ae911e24c
Signed-off-by: Jim Flynn <jim.flynn@arm.com>
diff --git a/tests/InferenceModel.hpp b/tests/InferenceModel.hpp
index fa71b30..d2d2ca3 100644
--- a/tests/InferenceModel.hpp
+++ b/tests/InferenceModel.hpp
@@ -74,9 +74,7 @@
 
 namespace InferenceModelInternal
 {
-// This needs to go when the armnnCaffeParser, armnnTfParser and armnnTfLiteParser
-// definitions of BindingPointInfo gets consolidated.
-using BindingPointInfo = std::pair<armnn::LayerBindingId, armnn::TensorInfo>;
+using BindingPointInfo = armnn::BindingPointInfo;
 
 using QuantizationParams = std::pair<float,int32_t>;
 
@@ -108,11 +106,10 @@
 {
 public:
     using Params = InferenceModelInternal::Params;
-    using BindingPointInfo = InferenceModelInternal::BindingPointInfo;
 
     static armnn::INetworkPtr Create(const Params& params,
-                                     std::vector<BindingPointInfo>& inputBindings,
-                                     std::vector<BindingPointInfo>& outputBindings)
+                                     std::vector<armnn::BindingPointInfo>& inputBindings,
+                                     std::vector<armnn::BindingPointInfo>& outputBindings)
     {
         const std::string& modelPath = params.m_ModelPath;
 
@@ -169,11 +166,10 @@
 public:
     using IParser          = armnnDeserializer::IDeserializer;
     using Params           = InferenceModelInternal::Params;
-    using BindingPointInfo = InferenceModelInternal::BindingPointInfo;
 
     static armnn::INetworkPtr Create(const Params& params,
-                                     std::vector<BindingPointInfo>& inputBindings,
-                                     std::vector<BindingPointInfo>& outputBindings)
+                                     std::vector<armnn::BindingPointInfo>& inputBindings,
+                                     std::vector<armnn::BindingPointInfo>& outputBindings)
     {
         auto parser(IParser::Create());
         BOOST_ASSERT(parser);
@@ -226,11 +222,10 @@
 public:
     using IParser = armnnTfLiteParser::ITfLiteParser;
     using Params = InferenceModelInternal::Params;
-    using BindingPointInfo = InferenceModelInternal::BindingPointInfo;
 
     static armnn::INetworkPtr Create(const Params& params,
-                                     std::vector<BindingPointInfo>& inputBindings,
-                                     std::vector<BindingPointInfo>& outputBindings)
+                                     std::vector<armnn::BindingPointInfo>& inputBindings,
+                                     std::vector<armnn::BindingPointInfo>& outputBindings)
     {
         const std::string& modelPath = params.m_ModelPath;
 
@@ -246,14 +241,14 @@
 
         for (const std::string& inputLayerName : params.m_InputBindings)
         {
-            BindingPointInfo inputBinding =
+            armnn::BindingPointInfo inputBinding =
                 parser->GetNetworkInputBindingInfo(params.m_SubgraphId, inputLayerName);
             inputBindings.push_back(inputBinding);
         }
 
         for (const std::string& outputLayerName : params.m_OutputBindings)
         {
-            BindingPointInfo outputBinding =
+            armnn::BindingPointInfo outputBinding =
                 parser->GetNetworkOutputBindingInfo(params.m_SubgraphId, outputLayerName);
             outputBindings.push_back(outputBinding);
         }
@@ -309,7 +304,7 @@
 
 template<typename TContainer>
 inline armnn::InputTensors MakeInputTensors(
-    const std::vector<InferenceModelInternal::BindingPointInfo>& inputBindings,
+    const std::vector<armnn::BindingPointInfo>& inputBindings,
     const std::vector<TContainer>& inputDataContainers)
 {
     armnn::InputTensors inputTensors;
@@ -323,7 +318,7 @@
 
     for (size_t i = 0; i < numInputs; i++)
     {
-        const InferenceModelInternal::BindingPointInfo& inputBinding = inputBindings[i];
+        const armnn::BindingPointInfo& inputBinding = inputBindings[i];
         const TContainer& inputData = inputDataContainers[i];
 
         boost::apply_visitor([&](auto&& value)
@@ -344,7 +339,7 @@
 
 template<typename TContainer>
 inline armnn::OutputTensors MakeOutputTensors(
-    const std::vector<InferenceModelInternal::BindingPointInfo>& outputBindings,
+    const std::vector<armnn::BindingPointInfo>& outputBindings,
     std::vector<TContainer>& outputDataContainers)
 {
     armnn::OutputTensors outputTensors;
@@ -358,7 +353,7 @@
 
     for (size_t i = 0; i < numOutputs; i++)
     {
-        const InferenceModelInternal::BindingPointInfo& outputBinding = outputBindings[i];
+        const armnn::BindingPointInfo& outputBinding = outputBindings[i];
         TContainer& outputData = outputDataContainers[i];
 
         boost::apply_visitor([&](auto&& value)
@@ -383,7 +378,6 @@
 public:
     using DataType           = TDataType;
     using Params             = InferenceModelInternal::Params;
-    using BindingPointInfo   = InferenceModelInternal::BindingPointInfo;
     using QuantizationParams = InferenceModelInternal::QuantizationParams;
     using TContainer         = boost::variant<std::vector<float>, std::vector<int>, std::vector<unsigned char>>;
 
@@ -564,24 +558,24 @@
         }
     }
 
-    const BindingPointInfo& GetInputBindingInfo(unsigned int inputIndex = 0u) const
+    const armnn::BindingPointInfo& GetInputBindingInfo(unsigned int inputIndex = 0u) const
     {
         CheckInputIndexIsValid(inputIndex);
         return m_InputBindings[inputIndex];
     }
 
-    const std::vector<BindingPointInfo>& GetInputBindingInfos() const
+    const std::vector<armnn::BindingPointInfo>& GetInputBindingInfos() const
     {
         return m_InputBindings;
     }
 
-    const BindingPointInfo& GetOutputBindingInfo(unsigned int outputIndex = 0u) const
+    const armnn::BindingPointInfo& GetOutputBindingInfo(unsigned int outputIndex = 0u) const
     {
         CheckOutputIndexIsValid(outputIndex);
         return m_OutputBindings[outputIndex];
     }
 
-    const std::vector<BindingPointInfo>& GetOutputBindingInfos() const
+    const std::vector<armnn::BindingPointInfo>& GetOutputBindingInfos() const
     {
         return m_OutputBindings;
     }
@@ -614,8 +608,8 @@
     armnn::NetworkId m_NetworkIdentifier;
     std::shared_ptr<armnn::IRuntime> m_Runtime;
 
-    std::vector<InferenceModelInternal::BindingPointInfo> m_InputBindings;
-    std::vector<InferenceModelInternal::BindingPointInfo> m_OutputBindings;
+    std::vector<armnn::BindingPointInfo> m_InputBindings;
+    std::vector<armnn::BindingPointInfo> m_OutputBindings;
     bool m_EnableProfiling;
 
     template<typename TContainer>
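
Usage note (not part of the patch): a minimal sketch of what a call site
looks like once the local alias is gone. It assumes armnn::BindingPointInfo
is the same std::pair<armnn::LayerBindingId, armnn::TensorInfo> alias that
the removed local declaration mirrored, and that it is declared in
armnn/Tensor.hpp; the shape, data type and binding id are illustrative.

    #include <armnn/Tensor.hpp> // assumed home of armnn::BindingPointInfo
    #include <utility>
    #include <vector>

    int main()
    {
        // Describe a 1x3 float tensor bound at layer binding id 0.
        armnn::TensorInfo info(armnn::TensorShape({1, 3}),
                               armnn::DataType::Float32);
        armnn::BindingPointInfo inputBinding = std::make_pair(0, info);

        // Call sites now build std::vector<armnn::BindingPointInfo>
        // directly, matching the Create(...) and MakeInputTensors(...)
        // signatures in the diff above.
        std::vector<armnn::BindingPointInfo> inputBindings{inputBinding};
        return static_cast<int>(inputBindings.size()) - 1;
    }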