IVGCVSW-6249 Add ProfilingDetails Macros to all workloads in Ref, Neon, CL

 * Add functionality to output only network details (without profiling events) in ExecuteNetwork

Signed-off-by: Keith Davis <keith.davis@arm.com>
Change-Id: I0c45e67193f308ce7b86f1bb1a918a266fefba2e
diff --git a/tests/ExecuteNetwork/ExecuteNetwork.cpp b/tests/ExecuteNetwork/ExecuteNetwork.cpp
index 64296d3..9a48645 100644
--- a/tests/ExecuteNetwork/ExecuteNetwork.cpp
+++ b/tests/ExecuteNetwork/ExecuteNetwork.cpp
@@ -325,6 +325,7 @@
         inferenceModelParams.m_AsyncEnabled                   = params.m_Concurrent;
         inferenceModelParams.m_ThreadPoolSize                 = params.m_ThreadPoolSize;
         inferenceModelParams.m_OutputDetailsToStdOut          = params.m_OutputDetailsToStdOut;
+        inferenceModelParams.m_OutputDetailsOnlyToStdOut      = params.m_OutputDetailsOnlyToStdOut;
 
         for(const std::string& inputName: params.m_InputNames)
         {
@@ -769,7 +770,9 @@
         return EXIT_FAILURE;
     }
 
-    if (ProgramOptions.m_ExNetParams.m_OutputDetailsToStdOut && !ProgramOptions.m_ExNetParams.m_EnableProfiling)
+    if ((ProgramOptions.m_ExNetParams.m_OutputDetailsToStdOut ||
+         ProgramOptions.m_ExNetParams.m_OutputDetailsOnlyToStdOut)
+         && !ProgramOptions.m_ExNetParams.m_EnableProfiling)
     {
         ARMNN_LOG(fatal) << "You must enable profiling if you would like to output layer details";
         return EXIT_FAILURE;
diff --git a/tests/ExecuteNetwork/ExecuteNetworkParams.hpp b/tests/ExecuteNetwork/ExecuteNetworkParams.hpp
index 97c605b..e519b02 100644
--- a/tests/ExecuteNetwork/ExecuteNetworkParams.hpp
+++ b/tests/ExecuteNetwork/ExecuteNetworkParams.hpp
@@ -44,6 +44,7 @@
     std::string                   m_ModelPath;
     unsigned int                  m_NumberOfThreads;
     bool                          m_OutputDetailsToStdOut;
+    bool                          m_OutputDetailsOnlyToStdOut;
     std::vector<std::string>      m_OutputNames;
     std::vector<std::string>      m_OutputTensorFiles;
     std::vector<std::string>      m_OutputTypes;
diff --git a/tests/ExecuteNetwork/ExecuteNetworkProgramOptions.cpp b/tests/ExecuteNetwork/ExecuteNetworkProgramOptions.cpp
index 1fd4b3d..927d804 100644
--- a/tests/ExecuteNetwork/ExecuteNetworkProgramOptions.cpp
+++ b/tests/ExecuteNetwork/ExecuteNetworkProgramOptions.cpp
@@ -410,9 +410,14 @@
                  cxxopts::value<uint32_t>(m_RuntimeOptions.m_ProfilingOptions.m_CapturePeriod)->default_value("150"))
 
                 ("output-network-details",
-                 "Outputs layer tensor infos and descriptors to std out. Defaults to off.",
+                 "Outputs layer tensor infos and descriptors to std out along with profiling events. Defaults to off.",
                  cxxopts::value<bool>(m_ExNetParams.m_OutputDetailsToStdOut)->default_value("false")
-                                                                            ->implicit_value("true"));
+                                                                            ->implicit_value("true"))
+                ("output-network-details-only",
+                 "Outputs layer tensor infos and descriptors to std out without profiling events. Defaults to off.",
+                 cxxopts::value<bool>(m_ExNetParams.m_OutputDetailsOnlyToStdOut)->default_value("false")
+                                                                                ->implicit_value("true"));
+
     }
     catch (const std::exception& e)
     {
diff --git a/tests/InferenceModel.hpp b/tests/InferenceModel.hpp
index 1db287f..b982df3 100644
--- a/tests/InferenceModel.hpp
+++ b/tests/InferenceModel.hpp
@@ -101,6 +101,7 @@
     bool                            m_EnableFastMath;
     bool                            m_SaveCachedNetwork;
     bool                            m_OutputDetailsToStdOut;
+    bool                            m_OutputDetailsOnlyToStdOut;
     std::string                     m_CachedNetworkFilePath;
     unsigned int                    m_NumberOfThreads;
     std::string                     m_MLGOTuningFilePath;
@@ -121,6 +122,7 @@
         , m_EnableFastMath(false)
         , m_SaveCachedNetwork(false)
         , m_OutputDetailsToStdOut(false)
+        , m_OutputDetailsOnlyToStdOut(false)
         , m_CachedNetworkFilePath("")
         , m_NumberOfThreads(0)
         , m_MLGOTuningFilePath("")
@@ -406,7 +408,8 @@
                    bool enableProfiling,
                    const std::string& dynamicBackendsPath,
                    const std::shared_ptr<armnn::IRuntime>& runtime = nullptr)
-        : m_EnableProfiling(enableProfiling)
+        : m_EnableProfiling(enableProfiling),
+          m_ProfilingDetailsMethod(armnn::ProfilingDetailsMethod::Undefined)
         , m_DynamicBackendsPath(dynamicBackendsPath)
     {
         if (runtime)
@@ -421,6 +424,12 @@
             m_Runtime = armnn::IRuntime::Create(options);
         }
 
+        // Configure the Profiler if the profiling details were requested
+        if (params.m_OutputDetailsOnlyToStdOut)
+            m_ProfilingDetailsMethod = armnn::ProfilingDetailsMethod::DetailsOnly;
+        else if (params.m_OutputDetailsToStdOut)
+            m_ProfilingDetailsMethod = armnn::ProfilingDetailsMethod::DetailsWithEvents;
+
         std::string invalidBackends;
         if (!CheckRequestedBackendsAreValid(params.m_ComputeDevices, armnn::Optional<std::string&>(invalidBackends)))
         {
@@ -492,7 +501,7 @@
                                                         armnn::MemorySource::Undefined,
                                                         armnn::MemorySource::Undefined,
                                                         enableProfiling,
-                                                        params.m_OutputDetailsToStdOut);
+                                                        m_ProfilingDetailsMethod);
             std::string errorMessage;
             ret = m_Runtime->LoadNetwork(m_NetworkIdentifier, std::move(optNet), errorMessage, networkProperties);
 
@@ -744,6 +753,7 @@
     std::vector<armnn::BindingPointInfo> m_InputBindings;
     std::vector<armnn::BindingPointInfo> m_OutputBindings;
     bool m_EnableProfiling;
+    armnn::ProfilingDetailsMethod m_ProfilingDetailsMethod;
     std::string m_DynamicBackendsPath;
 
     template<typename TContainer>