IVGCVSW-6611 ExNet prints inference time twice

 * Create an individual IRuntime shared_ptr in ExecuteNetwork main() for each
   path that calls MainImpl(). This prevents an additional runtime being
   created when the delegate is used.
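
 * For context, below is a minimal sketch of the pattern (Runtime,
   CreateRuntime, RunWithParser, RunWithDelegate and Dispatch are hypothetical
   stand-ins, not the actual ExecuteNetwork or Arm NN API): the runtime is
   constructed only on the path that uses it, so the delegate path never
   creates a second one.

       #include <iostream>
       #include <memory>
       #include <string>

       // Hypothetical stand-in for armnn::IRuntime.
       struct Runtime {};

       // Hypothetical stand-in for IRuntime::Create(); prints so a run shows
       // how many runtimes get constructed.
       std::shared_ptr<Runtime> CreateRuntime()
       {
           std::cout << "runtime created\n";
           return std::make_shared<Runtime>();
       }

       // Parser-based execution uses the runtime passed in.
       int RunWithParser(const std::shared_ptr<Runtime>& /*runtime*/) { return 0; }

       // Delegate-based execution manages its own runtime internally.
       int RunWithDelegate() { return 0; }

       int Dispatch(const std::string& modelFormat, bool useDelegate)
       {
           if (useDelegate)
           {
               return RunWithDelegate();       // no extra Runtime constructed here
           }
           if (modelFormat.find("armnn") != std::string::npos ||
               modelFormat.find("tflite") != std::string::npos)
           {
               auto runtime = CreateRuntime(); // created only on the path that uses it
               return RunWithParser(runtime);
           }
           return 1;
       }

       int main()
       {
           // Prints "runtime created" exactly once.
           return Dispatch("tflite", /*useDelegate=*/false);
       }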

Signed-off-by: Cathal Corbett <cathal.corbett@arm.com>
Change-Id: Ia4b508fbf2bbd25467c6235fed2f05662a7aecc0
diff --git a/tests/ExecuteNetwork/ExecuteNetwork.cpp b/tests/ExecuteNetwork/ExecuteNetwork.cpp
index 540bfd4..085721c 100644
--- a/tests/ExecuteNetwork/ExecuteNetwork.cpp
+++ b/tests/ExecuteNetwork/ExecuteNetwork.cpp
@@ -899,15 +899,13 @@
         return EXIT_FAILURE;
     }
 
-    // Create runtime
-    std::shared_ptr<armnn::IRuntime> runtime(armnn::IRuntime::Create(ProgramOptions.m_RuntimeOptions));
-
     std::string modelFormat = ProgramOptions.m_ExNetParams.m_ModelFormat;
 
     // Forward to implementation based on the parser type
     if (modelFormat.find("armnn") != std::string::npos)
     {
     #if defined(ARMNN_SERIALIZER)
+        std::shared_ptr<armnn::IRuntime> runtime(armnn::IRuntime::Create(ProgramOptions.m_RuntimeOptions));
         return MainImpl<armnnDeserializer::IDeserializer, float>(ProgramOptions.m_ExNetParams, runtime);
     #else
         ARMNN_LOG(fatal) << "Not built with serialization support.";
@@ -917,6 +915,7 @@
     else if (modelFormat.find("onnx") != std::string::npos)
     {
     #if defined(ARMNN_ONNX_PARSER)
+        std::shared_ptr<armnn::IRuntime> runtime(armnn::IRuntime::Create(ProgramOptions.m_RuntimeOptions));
         return MainImpl<armnnOnnxParser::IOnnxParser, float>(ProgramOptions.m_ExNetParams, runtime);
     #else
         ARMNN_LOG(fatal) << "Not built with Onnx parser support.";
@@ -928,10 +927,11 @@
         if (ProgramOptions.m_ExNetParams.m_TfLiteExecutor == ExecuteNetworkParams::TfLiteExecutor::ArmNNTfLiteParser)
         {
             #if defined(ARMNN_TF_LITE_PARSER)
-                        return MainImpl<armnnTfLiteParser::ITfLiteParser, float>(ProgramOptions.m_ExNetParams, runtime);
+                std::shared_ptr<armnn::IRuntime> runtime(armnn::IRuntime::Create(ProgramOptions.m_RuntimeOptions));
+                return MainImpl<armnnTfLiteParser::ITfLiteParser, float>(ProgramOptions.m_ExNetParams, runtime);
             #else
-                        ARMNN_LOG(fatal) << "Not built with Tensorflow-Lite parser support.";
-                        return EXIT_FAILURE;
+                ARMNN_LOG(fatal) << "Not built with Tensorflow-Lite parser support.";
+                return EXIT_FAILURE;
             #endif
         }
         else if (ProgramOptions.m_ExNetParams.m_TfLiteExecutor ==