IVGCVSW-4206 Optionally parse unsupported ops in ExecuteNetwork
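
Add an m_ParseUnsupported flag to InferenceModel's Params (default false) and
forward it to the parser via TfLiteParserOptions::m_StandInLayerForUnsupported,
so that operators the TfLite parser does not recognise can be stood in for
instead of failing the whole parse.

Illustrative, standalone sketch of the parser-side option this exposes (not
part of this change; the model path is a placeholder):

    // Assumes the public armnnTfLiteParser API; only the use of
    // m_StandInLayerForUnsupported relates to this patch.
    #include "armnnTfLiteParser/ITfLiteParser.hpp"

    armnnTfLiteParser::ITfLiteParser::TfLiteParserOptions options;
    options.m_StandInLayerForUnsupported = true; // tolerate unknown ops
    auto parser = armnnTfLiteParser::ITfLiteParser::Create(options);
    armnn::INetworkPtr network =
        parser->CreateNetworkFromBinaryFile("model.tflite"); // placeholder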

Change-Id: I593e2540bd870d70aabb2c959f4e63a899967269
Signed-off-by: Derek Lamberti <derek.lamberti@arm.com>
diff --git a/tests/InferenceModel.hpp b/tests/InferenceModel.hpp
index 9e054c4..6ec63ba 100644
--- a/tests/InferenceModel.hpp
+++ b/tests/InferenceModel.hpp
@@ -93,6 +93,7 @@
     bool                            m_VisualizePostOptimizationModel;
     bool                            m_EnableFp16TurboMode;
     bool                            m_PrintIntermediateLayers;
+    bool                            m_ParseUnsupported;
 
     Params()
         : m_ComputeDevices{}
@@ -101,6 +102,7 @@
         , m_VisualizePostOptimizationModel(false)
         , m_EnableFp16TurboMode(false)
         , m_PrintIntermediateLayers(false)
+        , m_ParseUnsupported(false)
     {}
 };
 
@@ -235,7 +237,9 @@
         const std::string& modelPath = params.m_ModelPath;
 
         // Create a network from a file on disk
-        auto parser(IParser::Create());
+        IParser::TfLiteParserOptions options;
+        options.m_StandInLayerForUnsupported = params.m_ParseUnsupported;
+        auto parser(IParser::Create(options));
 
         armnn::INetworkPtr network{nullptr, [](armnn::INetwork *){}};