IVGCVSW-7197 Implement Pimpl Idiom for OptimizerOptions

Signed-off-by: John Mcloughlin <john.mcloughlin@arm.com>
Change-Id: Id4bdc31e3e6f18ccaef232c29a2d2825c915b21c
diff --git a/tests/TfLiteYoloV3Big-Armnn/TfLiteYoloV3Big-Armnn.cpp b/tests/TfLiteYoloV3Big-Armnn/TfLiteYoloV3Big-Armnn.cpp
index 75bc9a3..3ecd160 100644
--- a/tests/TfLiteYoloV3Big-Armnn/TfLiteYoloV3Big-Armnn.cpp
+++ b/tests/TfLiteYoloV3Big-Armnn/TfLiteYoloV3Big-Armnn.cpp
@@ -128,8 +128,8 @@
     ARMNN_LOG(debug) << "Model loaded ok: " << filename;
 
     // Optimize backbone model
-    OptimizerOptions options;
-    options.m_ImportEnabled = enableImport != ImportMemory::False;
+    OptimizerOptionsOpaque options;
+    options.SetImportEnabled(enableImport != ImportMemory::False);
     auto optimizedModel = Optimize(*model, backendPreferences, runtime.GetDeviceSpec(), options);
     if (!optimizedModel)
     {
@@ -149,7 +149,7 @@
     {
         std::string errorMessage;
 
-        armnn::MemorySource memSource = options.m_ImportEnabled ? armnn::MemorySource::Malloc
-                                                                : armnn::MemorySource::Undefined;
+        armnn::MemorySource memSource = options.GetImportEnabled() ? armnn::MemorySource::Malloc
+                                                                   : armnn::MemorySource::Undefined;
         INetworkProperties modelProps(false, memSource, memSource);
         Status status = runtime.LoadNetwork(networkId, std::move(optimizedModel), errorMessage, modelProps);