IVGCVSW-7626 Add ExecuteNetwork support for the Opaque Delegate

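Allow ExecuteNetwork to run a TfLite model through the Arm NN TfLite
Opaque Delegate: the executor selection gains an 'opaquedelegate'
value, and TfliteExecutor creates the opaque delegate through the
TF Lite delegate plugin registry using default TFLiteSettings until
the ExecuteNetwork options are plumbed through. Example invocation
(flag spellings here are assumed from the existing ExecuteNetwork
options; only the 'opaquedelegate' value is new in this change):

    ExecuteNetwork -m model.tflite -c CpuAcc --tflite-executor opaquedelegate
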
Signed-off-by: Narumol Prangnawarat <narumol.prangnawarat@arm.com>
Change-Id: Ibdded86713368ecfdf31c4118dfe8a3404d1e3b8
diff --git a/tests/ExecuteNetwork/ExecuteNetwork.cpp b/tests/ExecuteNetwork/ExecuteNetwork.cpp
index 14841ec..f9f583a 100644
--- a/tests/ExecuteNetwork/ExecuteNetwork.cpp
+++ b/tests/ExecuteNetwork/ExecuteNetwork.cpp
@@ -5,7 +5,7 @@
 
 #include "ExecuteNetworkProgramOptions.hpp"
 #include "ArmNNExecutor.hpp"
-#if defined(ARMNN_TFLITE_DELEGATE)
+#if defined(ARMNN_TFLITE_DELEGATE) || defined(ARMNN_TFLITE_OPAQUE_DELEGATE)
 #include "TfliteExecutor.hpp"
 #endif
 #include <armnn/Logging.hpp>
@@ -13,10 +13,12 @@
 
 std::unique_ptr<IExecutor> BuildExecutor(ProgramOptions& programOptions)
 {
-    if (programOptions.m_ExNetParams.m_TfLiteExecutor == ExecuteNetworkParams::TfLiteExecutor::ArmNNTfLiteDelegate ||
+    if (programOptions.m_ExNetParams.m_TfLiteExecutor ==
+            ExecuteNetworkParams::TfLiteExecutor::ArmNNTfLiteOpaqueDelegate ||
+        programOptions.m_ExNetParams.m_TfLiteExecutor == ExecuteNetworkParams::TfLiteExecutor::ArmNNTfLiteDelegate ||
         programOptions.m_ExNetParams.m_TfLiteExecutor == ExecuteNetworkParams::TfLiteExecutor::TfliteInterpreter)
     {
-#if defined(ARMNN_TFLITE_DELEGATE)
+#if defined(ARMNN_TFLITE_DELEGATE) || defined(ARMNN_TFLITE_OPAQUE_DELEGATE)
         return std::make_unique<TfLiteExecutor>(programOptions.m_ExNetParams, programOptions.m_RuntimeOptions);
 #else
         ARMNN_LOG(fatal) << "Not built with Arm NN Tensorflow-Lite delegate support.";
diff --git a/tests/ExecuteNetwork/ExecuteNetworkParams.hpp b/tests/ExecuteNetwork/ExecuteNetworkParams.hpp
index 020dbdc..ffcb4f4 100644
--- a/tests/ExecuteNetwork/ExecuteNetworkParams.hpp
+++ b/tests/ExecuteNetwork/ExecuteNetworkParams.hpp
@@ -20,7 +20,8 @@
     {
         ArmNNTfLiteParser,
         ArmNNTfLiteDelegate,
-        TfliteInterpreter
+        TfliteInterpreter,
+        ArmNNTfLiteOpaqueDelegate,
     };
 
     bool                              m_AllowExpandedDims;
diff --git a/tests/ExecuteNetwork/ExecuteNetworkProgramOptions.cpp b/tests/ExecuteNetwork/ExecuteNetworkProgramOptions.cpp
index 3a54b1a..8d5035e 100644
--- a/tests/ExecuteNetwork/ExecuteNetworkProgramOptions.cpp
+++ b/tests/ExecuteNetwork/ExecuteNetworkProgramOptions.cpp
@@ -355,6 +355,7 @@
                  "Set the executor for the tflite model: parser, delegate, tflite"
                  "parser is the ArmNNTfLiteParser, "
                  "delegate is the ArmNNTfLiteDelegate, "
+                 "opaquedelegate is the ArmNNTfLiteOpaqueDelegate, "
                  "tflite is the TfliteInterpreter",
                  cxxopts::value<std::string>()->default_value("parser"))
 
@@ -539,6 +540,10 @@
     {
         m_ExNetParams.m_TfLiteExecutor = ExecuteNetworkParams::TfLiteExecutor::ArmNNTfLiteParser;
     }
+    else if (tfliteExecutor == "opaquedelegate")
+    {
+        m_ExNetParams.m_TfLiteExecutor = ExecuteNetworkParams::TfLiteExecutor::ArmNNTfLiteOpaqueDelegate;
+    }
     else if (tfliteExecutor == "delegate")
     {
         m_ExNetParams.m_TfLiteExecutor = ExecuteNetworkParams::TfLiteExecutor::ArmNNTfLiteDelegate;
diff --git a/tests/ExecuteNetwork/TfliteExecutor.cpp b/tests/ExecuteNetwork/TfliteExecutor.cpp
index 87731c2..04f6ddb 100644
--- a/tests/ExecuteNetwork/TfliteExecutor.cpp
+++ b/tests/ExecuteNetwork/TfliteExecutor.cpp
@@ -3,6 +3,11 @@
 // SPDX-License-Identifier: MIT
 //
 
+#if defined(ARMNN_TFLITE_OPAQUE_DELEGATE)
+#include <../delegate/opaque/include/armnn_delegate.hpp>
+#endif
+
+#include <tensorflow/lite/core/c/c_api.h>
 #include "TfliteExecutor.hpp"
 #include "tensorflow/lite/kernels/kernel_util.h"
 
@@ -26,8 +31,33 @@
     {
         LogAndThrow("Failed to allocate tensors in the TfLiteInterpreter.");
     }
-    if (m_Params.m_TfLiteExecutor == ExecuteNetworkParams::TfLiteExecutor::ArmNNTfLiteDelegate)
+
+    if (m_Params.m_TfLiteExecutor == ExecuteNetworkParams::TfLiteExecutor::ArmNNTfLiteOpaqueDelegate)
     {
+#if defined(ARMNN_TFLITE_OPAQUE_DELEGATE)
+        // Use default TFLiteSettings until the ExecuteNetwork options are plumbed through to the opaque delegate
+        flatbuffers::FlatBufferBuilder flatBufferBuilder;
+        TFLiteSettingsBuilder tfliteSettingsBuilder(flatBufferBuilder);
+        flatbuffers::Offset<TFLiteSettings> tfliteSettings = tfliteSettingsBuilder.Finish();
+        flatBufferBuilder.Finish(tfliteSettings);
+        const TFLiteSettings* settings =
+            flatbuffers::GetRoot<TFLiteSettings>(flatBufferBuilder.GetBufferPointer());
+
+        std::unique_ptr<delegates::DelegatePluginInterface> delegatePlugIn =
+            delegates::DelegatePluginRegistry::CreateByName("armnn_delegate", *settings);
+
+        // Create Armnn Opaque Delegate from Armnn Delegate Plugin
+        delegates::TfLiteDelegatePtr armnnDelegate = delegatePlugIn->Create();
+
+        // Add Delegate to the builder
+        builder.AddDelegate(armnnDelegate.get());
+#else
+        LogAndThrow("Not built with Arm NN Tensorflow-Lite opaque delegate support.");
+#endif
+    }
+    else if (m_Params.m_TfLiteExecutor == ExecuteNetworkParams::TfLiteExecutor::ArmNNTfLiteDelegate)
+    {
+#if defined(ARMNN_TFLITE_DELEGATE)
         // Create the Armnn Delegate
         // Populate a DelegateOptions from the ExecuteNetworkParams.
         armnnDelegate::DelegateOptions delegateOptions = m_Params.ToDelegateOptions();
@@ -40,6 +70,9 @@
         {
             LogAndThrow("Could not register ArmNN TfLite Delegate to TfLiteInterpreter.");
         }
+#else
+        LogAndThrow("Not built with Arm NN Tensorflow-Lite delegate support.");
+#endif
     }
     else
     {
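
For reference, below is a minimal standalone sketch (not part of the patch) of
the opaque-delegate path added in TfliteExecutor.cpp above: build default
TFLiteSettings, create the Arm NN delegate plugin by name, and attach the
resulting delegate to an InterpreterBuilder. It assumes a build with
ARMNN_TFLITE_OPAQUE_DELEGATE defined; the RunWithArmnnOpaqueDelegate helper,
the model path, and the TF Lite header paths are illustrative assumptions and
may differ between TF Lite versions.

// Sketch only: mirrors the plugin-registry sequence used in TfliteExecutor.cpp.
// Header paths are assumptions and vary between TF Lite versions.
#include <../delegate/opaque/include/armnn_delegate.hpp>   // as included by TfliteExecutor.cpp above

#include <tensorflow/lite/acceleration/configuration/configuration_generated.h>
#include <tensorflow/lite/core/acceleration/configuration/delegate_registry.h>
#include <tensorflow/lite/interpreter.h>
#include <tensorflow/lite/kernels/register.h>
#include <tensorflow/lite/model_builder.h>

#include <memory>
#include <string>

void RunWithArmnnOpaqueDelegate(const std::string& modelPath)
{
    // Build a default TFLiteSettings flatbuffer (no options plumbed through yet).
    flatbuffers::FlatBufferBuilder flatBufferBuilder;
    tflite::TFLiteSettingsBuilder settingsBuilder(flatBufferBuilder);
    flatBufferBuilder.Finish(settingsBuilder.Finish());
    const tflite::TFLiteSettings* settings =
        flatbuffers::GetRoot<tflite::TFLiteSettings>(flatBufferBuilder.GetBufferPointer());

    // Look up the Arm NN delegate plugin registered by the opaque delegate
    // library and create the delegate from it.
    std::unique_ptr<tflite::delegates::DelegatePluginInterface> plugin =
        tflite::delegates::DelegatePluginRegistry::CreateByName("armnn_delegate", *settings);
    tflite::delegates::TfLiteDelegatePtr armnnDelegate = plugin->Create();

    // Build the interpreter with the delegate attached. The delegate is
    // declared before the interpreter so it outlives it in this scope.
    // Error handling is omitted for brevity.
    auto model = tflite::FlatBufferModel::BuildFromFile(modelPath.c_str());
    tflite::ops::builtin::BuiltinOpResolver resolver;
    tflite::InterpreterBuilder builder(*model, resolver);
    builder.AddDelegate(armnnDelegate.get());

    std::unique_ptr<tflite::Interpreter> interpreter;
    builder(&interpreter);
    interpreter->AllocateTensors();

    // Fill inputs here, then run (input setup omitted in this sketch).
    interpreter->Invoke();
}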