IVGCVSW-7626 Change sequence of Interpreter Building

Build the TfLiteInterpreter only after the Arm NN TfLite opaque delegate
has been added to the InterpreterBuilder, build it up front in the classic
Arm NN TfLite delegate path, and defer AllocateTensors until delegate
setup is complete.

Signed-off-by: Narumol Prangnawarat <narumol.prangnawarat@arm.com>
Change-Id: I3f0e224c90a4eea9945183028c9de1b61e75e510
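
For reference, a minimal standalone sketch of the call order this change
establishes. It assumes plain TF Lite with no Arm NN delegate linked in;
the model path "model.tflite" and the delegate names in the comments are
placeholders, and errors go to stderr rather than LogAndThrow.

    #include <iostream>
    #include <memory>

    #include <tensorflow/lite/interpreter.h>
    #include <tensorflow/lite/kernels/register.h>
    #include <tensorflow/lite/model.h>

    int main()
    {
        // Load the flatbuffer model and set up the builder, as TfliteExecutor does.
        auto model = tflite::FlatBufferModel::BuildFromFile("model.tflite");
        if (model == nullptr)
        {
            std::cerr << "Failed to load model\n";
            return 1;
        }

        tflite::ops::builtin::BuiltinOpResolver resolver;
        tflite::InterpreterBuilder builder(*model, resolver);

        // An opaque delegate has to be registered on the builder before the
        // interpreter is built, e.g. builder.AddDelegate(someOpaqueDelegate);

        std::unique_ptr<tflite::Interpreter> interpreter;
        if (builder(&interpreter) != kTfLiteOk)
        {
            std::cerr << "Error loading the model into the interpreter\n";
            return 1;
        }

        // A classic delegate is applied to the already-built interpreter instead,
        // e.g. interpreter->ModifyGraphWithDelegate(someDelegate);

        // Tensors are allocated only once any delegate has been set up.
        if (interpreter->AllocateTensors() != kTfLiteOk)
        {
            std::cerr << "Failed to allocate tensors\n";
            return 1;
        }

        std::cout << "Interpreter ready with " << interpreter->inputs().size()
                  << " input(s)\n";
        return 0;
    }

The reordering reflects that an opaque delegate is consumed by the
InterpreterBuilder itself, whereas the classic delegate is applied to an
interpreter that already exists; AllocateTensors moves to the end so it
runs after either kind of delegate has been set up.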
diff --git a/tests/ExecuteNetwork/TfliteExecutor.cpp b/tests/ExecuteNetwork/TfliteExecutor.cpp
index 04f6ddb..8412750 100644
--- a/tests/ExecuteNetwork/TfliteExecutor.cpp
+++ b/tests/ExecuteNetwork/TfliteExecutor.cpp
@@ -23,14 +23,6 @@
     tflite::ops::builtin::BuiltinOpResolver resolver;
 
     tflite::InterpreterBuilder builder(*m_Model, resolver);
-    if (builder(&m_TfLiteInterpreter) != kTfLiteOk)
-    {
-        LogAndThrow("Error loading the model into the TfLiteInterpreter.");
-    }
-    if (m_TfLiteInterpreter->AllocateTensors() != kTfLiteOk)
-    {
-        LogAndThrow("Failed to allocate tensors in the TfLiteInterpreter.");
-    }
 
     if (m_Params.m_TfLiteExecutor == ExecuteNetworkParams::TfLiteExecutor::ArmNNTfLiteOpaqueDelegate)
     {
@@ -51,6 +43,11 @@
 
         // Add Delegate to the builder
         builder.AddDelegate(armnnDelegate.get());
+        if (builder(&m_TfLiteInterpreter) != kTfLiteOk)
+        {
+            LogAndThrow("Error loading the model into the TfLiteInterpreter.");
+        }
+
 #else
         LogAndThrow("Not built with Arm NN Tensorflow-Lite opaque delegate support.");
 #endif
@@ -58,6 +55,10 @@
     else if (m_Params.m_TfLiteExecutor == ExecuteNetworkParams::TfLiteExecutor::ArmNNTfLiteDelegate)
     {
 #if defined(ARMNN_TFLITE_DELEGATE)
+        if (builder(&m_TfLiteInterpreter) != kTfLiteOk)
+        {
+            LogAndThrow("Error loading the model into the TfLiteInterpreter.");
+        }
         // Create the Armnn Delegate
         // Populate a DelegateOptions from the ExecuteNetworkParams.
         armnnDelegate::DelegateOptions delegateOptions = m_Params.ToDelegateOptions();
@@ -79,6 +80,11 @@
         std::cout << "Running on TfLite without ArmNN delegate\n";
     }
 
+    if (m_TfLiteInterpreter->AllocateTensors() != kTfLiteOk)
+    {
+        LogAndThrow("Failed to allocate tensors in the TfLiteInterpreter.");
+    }
+
     const size_t numInputs = m_TfLiteInterpreter->inputs().size();
 
     for(unsigned int inputIndex = 0; inputIndex < numInputs; ++inputIndex)