Release 18.08
diff --git a/include/armnn/IRuntime.hpp b/include/armnn/IRuntime.hpp
index a1a3f0f..36efdbd 100644
--- a/include/armnn/IRuntime.hpp
+++ b/include/armnn/IRuntime.hpp
@@ -9,6 +9,7 @@
 #include "Types.hpp"
 #include "Tensor.hpp"
 #include "INetwork.hpp"
+#include "IProfiler.hpp"
 #include "TypesUtils.hpp"
 
 namespace armnn
@@ -16,7 +17,7 @@
 
 using NetworkId = int;
 
-class IClTunedParameters;
+class IGpuAccTunedParameters;
 
 class IRuntime;
 using IRuntimePtr = std::unique_ptr<IRuntime, void(*)(IRuntime* runtime)>;
@@ -26,66 +27,80 @@
 public:
     struct CreationOptions
     {
-        Compute m_DefaultComputeDevice;
-        bool m_UseCpuRefAsFallback;
-        /// If set, uses the CL tuned parameters from the given object when executing CL workloads.
-        /// It will also be updated with new tuned parameters if it is configured to do so.
-        IClTunedParameters* m_ClTunedParameters;
+        CreationOptions()
+            : m_GpuAccTunedParameters(nullptr)
+            , m_EnableGpuProfiling(false)
+        {}
 
-        CreationOptions(Compute defaultComputeDevice)
-            : m_DefaultComputeDevice(defaultComputeDevice)
-            , m_UseCpuRefAsFallback(true)
-            , m_ClTunedParameters(nullptr)
-        {
-        }
+        /// If set, uses the GpuAcc tuned parameters from the given object when executing GPU workloads.
+        /// It will also be updated with new tuned parameters if it is configured to do so.
+        std::shared_ptr<IGpuAccTunedParameters> m_GpuAccTunedParameters;
+
+        /// Setting this flag will allow the user to obtain GPU profiling information from the runtime.
+        bool m_EnableGpuProfiling;
     };
 
     static IRuntime* CreateRaw(const CreationOptions& options);
     static IRuntimePtr Create(const CreationOptions& options);
     static void Destroy(IRuntime* runtime);
 
-    /// Load a complete network into the IRuntime.
-    /// @param [out] networkIdOut Unique identifier for the network is returned in this reference.
-    /// @param [in] network Complete network to load into the IRuntime.
+    /// Loads a complete network into the IRuntime.
+    /// @param [out] networkIdOut - Unique identifier for the network is returned in this reference.
+    /// @param [in] network - Complete network to load into the IRuntime.
     /// The runtime takes ownership of the network once passed in.
     /// @return armnn::Status
     virtual Status LoadNetwork(NetworkId& networkIdOut, IOptimizedNetworkPtr network) = 0;
 
+    /// Load a complete network into the IRuntime.
+    /// @param [out] networkIdOut Unique identifier for the network is returned in this reference.
+    /// @param [in] network Complete network to load into the IRuntime.
+    /// @param [out] errorMessage Error message if there were any errors.
+    /// The runtime takes ownership of the network once passed in.
+    /// @return armnn::Status
+    virtual Status LoadNetwork(NetworkId& networkIdOut,
+                               IOptimizedNetworkPtr network,
+                               std::string & errorMessage) = 0;
+
     virtual TensorInfo GetInputTensorInfo(NetworkId networkId, LayerBindingId layerId) const = 0;
     virtual TensorInfo GetOutputTensorInfo(NetworkId networkId, LayerBindingId layerId) const = 0;
 
-    // Evaluate network using input in inputTensors, outputs filled into outputTensors
+    /// Evaluates a network using input in inputTensors and outputs filled into outputTensors
     virtual Status EnqueueWorkload(NetworkId networkId,
-                           const InputTensors& inputTensors,
-                           const OutputTensors& outputTensors) = 0;
+                                   const InputTensors& inputTensors,
+                                   const OutputTensors& outputTensors) = 0;
 
-    /// Unload a network from the IRuntime.
+    /// Unloads a network from the IRuntime.
     /// At the moment this only removes the network from the m_Impl->m_Network.
     /// This might need more work in the future to be AndroidNN compliant.
-    /// @param [in] networkId Unique identifier for the network to be unloaded. Generated in LoadNetwork().
+    /// @param [in] networkId - Unique identifier for the network to be unloaded. Generated in LoadNetwork().
     /// @return armnn::Status
     virtual Status UnloadNetwork(NetworkId networkId) = 0;
 
-    virtual const DeviceSpec& GetDeviceSpec() const = 0;
+    virtual const IDeviceSpec& GetDeviceSpec() const = 0;
+
+    /// Gets the profiler corresponding to the given network id.
+    /// @param networkId The id of the network for which to get the profiler.
+    /// @return A pointer to the requested profiler, or nullptr if not found.
+    virtual const std::shared_ptr<IProfiler> GetProfiler(NetworkId networkId) const = 0;
 
 protected:
     ~IRuntime() {}
 };
 
-using IClTunedParametersPtr = std::unique_ptr<IClTunedParameters, void(*)(IClTunedParameters* params)>;
+using IGpuAccTunedParametersPtr = std::shared_ptr<IGpuAccTunedParameters>;
 
-/// Manages a set of Open CL parameters which have been tuned for maximum performance.
-/// Pass an instance of this object to the IRuntime::Create() method (via IRuntime::CreationOptions) to use it
-/// for all CL workload execution.
+/// Manages a set of GpuAcc parameters which have been tuned for maximum performance.
+/// Pass an instance of this object to the IRuntime::Create() method (via IRuntime::CreationOptions) to use it
+/// for all GPU workload execution.
 ///
 /// Can be created in two modes:
-///     - In UseTunedParameters mode the parameters stored in this object are used to execute CL workloads.
-///     - In UpdateTunedParameters mode, additionally, whenever a CL workload is executed for the first time the
+///     - In UseTunedParameters mode, the parameters stored in this object are used to execute GPU workloads.
+///     - In UpdateTunedParameters mode, additionally, whenever a GPU workload is executed for the first time, the
 ///       optimum parameters will be found and stored in this object. WARNING - This tuning can be slow.
 ///
-/// The parameters can be loaded from and saved to a file so that you first run a slow initial read-write
+/// The parameters can be loaded from and saved to a file so that you can first run a slow initial read-write
 /// execution, save the parameters for later and then run fast read-only executions using the optimised parameters.
-class IClTunedParameters
+class IGpuAccTunedParameters
 {
 public:
     enum class Mode
@@ -96,10 +111,10 @@
 
-    /// Creates an IClTunedParameters with the given mode.
+    /// Creates an IGpuAccTunedParameters with the given mode.
     /// @{
-    static IClTunedParameters* CreateRaw(Mode mode);
-    static IClTunedParametersPtr Create(Mode mode);
+    static IGpuAccTunedParameters* CreateRaw(Mode mode);
+    static IGpuAccTunedParametersPtr Create(Mode mode);
     /// @}
-    static void Destroy(IClTunedParameters* params);
+    static void Destroy(IGpuAccTunedParameters* params);
 
     /// Loads an existing set of tuned parameters from the given file.
     /// If there is an error loading the file, an armnn::Exception is thrown.
@@ -110,7 +125,7 @@
     virtual void Save(const char* filename) const = 0;
 
 protected:
-    virtual ~IClTunedParameters() {};
+    virtual ~IGpuAccTunedParameters() {};
 };
 
 }