//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//
#pragma once

#include "BackendOptions.hpp"
#include "INetwork.hpp"
#include "IProfiler.hpp"
#include "IWorkingMemHandle.hpp"
#include "IAsyncExecutionCallback.hpp"
#include "Tensor.hpp"
#include "Types.hpp"
#include "TypesUtils.hpp"
#include "profiling/ILocalPacketHandler.hpp"

#include <armnn/backends/ICustomAllocator.hpp>
#include <armnn/backends/IMemoryOptimizerStrategy.hpp>
#include <memory>
#include <map>

namespace armnn
{

using NetworkId = int;

class IGpuAccTunedParameters;

struct RuntimeImpl;
class IRuntime;
using IRuntimePtr = std::unique_ptr<IRuntime, void(*)(IRuntime* runtime)>;

struct INetworkProperties
{
    INetworkProperties(bool asyncEnabled,
                       MemorySource inputSource,
                       MemorySource outputSource,
                       bool profilingEnabled = false,
                       ProfilingDetailsMethod detailsMethod = ProfilingDetailsMethod::Undefined,
                       bool externalMemoryManagementEnabled = false)
        : m_ImportEnabled(inputSource != MemorySource::Undefined),
          m_ExportEnabled(outputSource != MemorySource::Undefined),
          m_AsyncEnabled(asyncEnabled),
          m_ProfilingEnabled(profilingEnabled),
          m_OutputNetworkDetailsMethod(detailsMethod),
          m_InputSource(inputSource),
          m_OutputSource(outputSource),
          m_ExternalMemoryManagementEnabled(externalMemoryManagementEnabled)
        {}

    /// Deprecated and will be removed in a future release.
    const bool m_ImportEnabled;
    /// Deprecated and will be removed in a future release.
    const bool m_ExportEnabled;

    const bool m_AsyncEnabled;

    const bool m_ProfilingEnabled;

    const ProfilingDetailsMethod m_OutputNetworkDetailsMethod;

    const MemorySource m_InputSource;
    const MemorySource m_OutputSource;

    const bool m_ExternalMemoryManagementEnabled;

    virtual ~INetworkProperties() {}
};

using namespace armnn::experimental;

class IRuntime
{
public:
    struct CreationOptions
    {
        CreationOptions()
            : m_GpuAccTunedParameters(nullptr)
            , m_EnableGpuProfiling(false)
            , m_DynamicBackendsPath("")
            , m_ProtectedMode(false)
            , m_CustomAllocatorMap()
            , m_MemoryOptimizerStrategyMap()
        {}

        /// If set, uses the GpuAcc tuned parameters from the given object when executing GPU workloads.
        /// It will also be updated with new tuned parameters if it is configured to do so.
        std::shared_ptr<IGpuAccTunedParameters> m_GpuAccTunedParameters;

        /// Setting this flag will allow the user to obtain GPU profiling information from the runtime.
        bool m_EnableGpuProfiling;

        /// Setting this value will override the paths set by the DYNAMIC_BACKEND_PATHS compiler directive.
        /// Only a single path is allowed for the override.
        /// It defines the path to search for any [dynamic backend libraries](src/dynamic/README.md).
        std::string m_DynamicBackendsPath;

        /// Setting this flag will allow the user to create the Runtime in protected mode.
        /// It will run all the inferences on protected memory and will make sure that
        /// INetworkProperties::m_ImportEnabled is set to true with the MemorySource::DmaBufProtected option.
        /// This requires that the backend supports Protected Memory and has an allocator capable of
        /// allocating Protected Memory associated with it.
        bool m_ProtectedMode;

        /// @brief A map to define a custom memory allocator for specific backend Ids.
        ///
        /// @details A Custom Allocator is used for allocation of working memory in the backends.
        /// Set this if you need to take control of how memory is allocated on a backend. Required for
        /// Protected Mode in order to correctly allocate Protected Memory.
        ///
        /// @note Only supported for GpuAcc.
        std::map<BackendId, std::shared_ptr<ICustomAllocator>> m_CustomAllocatorMap;

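        // A minimal sketch of registering a custom allocator for protected mode. The names are
        // illustrative: MyDmaBufAllocator is assumed to be a user-defined class implementing the
        // ICustomAllocator interface.
        //
        //     IRuntime::CreationOptions options;
        //     options.m_ProtectedMode = true;
        //     options.m_CustomAllocatorMap.emplace("GpuAcc", std::make_shared<MyDmaBufAllocator>());
        //     IRuntimePtr runtime = IRuntime::Create(options);
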
        /// @brief A map to define a custom memory optimizer strategy for specific backend Ids.
        ///
        /// @details A Memory Optimizer Strategy provides a solution to an abstract representation of
        /// a network's memory requirements. This can also be used to return a pre-computed solution
        /// for a specific network. Set this if you want to implement a Custom Memory Optimizer Strategy
        /// for a given backend.
        std::map<BackendId, std::shared_ptr<IMemoryOptimizerStrategy>> m_MemoryOptimizerStrategyMap;

        struct ExternalProfilingOptions
        {
            ExternalProfilingOptions()
                : m_EnableProfiling(false)
                , m_TimelineEnabled(false)
                , m_OutgoingCaptureFile("")
                , m_IncomingCaptureFile("")
                , m_FileOnly(false)
                , m_CapturePeriod(LOWEST_CAPTURE_PERIOD)
                , m_FileFormat("binary")
                , m_LocalPacketHandlers()
            {}

            /// Indicates whether external profiling is enabled or not.
            bool m_EnableProfiling;
            /// Indicates whether external timeline profiling is enabled or not.
            bool m_TimelineEnabled;
            /// Path to a file in which outgoing timeline profiling messages will be stored.
            std::string m_OutgoingCaptureFile;
            /// Path to a file in which incoming timeline profiling messages will be stored.
            std::string m_IncomingCaptureFile;
            /// Enable profiling output to file only.
            bool m_FileOnly;
            /// The capture period at which captured profiling messages will be flushed.
            uint32_t m_CapturePeriod;
            /// The format of the file used for outputting profiling data.
            std::string m_FileFormat;
            std::vector<arm::pipe::ILocalPacketHandlerSharedPtr> m_LocalPacketHandlers;
        };
        ExternalProfilingOptions m_ProfilingOptions;

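        // A minimal sketch of enabling file-based external profiling through these options
        // (the output path is illustrative only):
        //
        //     IRuntime::CreationOptions options;
        //     options.m_ProfilingOptions.m_EnableProfiling     = true;
        //     options.m_ProfilingOptions.m_TimelineEnabled     = true;
        //     options.m_ProfilingOptions.m_FileOnly            = true;
        //     options.m_ProfilingOptions.m_OutgoingCaptureFile = "/tmp/armnn_profiling.bin";
        //     IRuntimePtr runtime = IRuntime::Create(options);
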
        /// Pass backend specific options.
        ///
        /// For example, to enable GpuAcc tuning add the following
        /// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~.cpp
        /// m_BackendOptions.emplace_back(
        ///     BackendOptions{"GpuAcc",
        ///                    {
        ///                        {"TuningLevel", 2},
        ///                        {"TuningFile", filename},
        ///                        {"MemoryOptimizerStrategy", strategyname}
        ///                    }
        ///     });
        /// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        /// Execute representative workloads through the runtime to generate tuning data.
        /// The tuning file is written once the runtime is destroyed.
        ///
        /// To execute with the tuning data, start up with just the tuning file specified.
        /// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~.cpp
        /// m_BackendOptions.emplace_back(
        ///     BackendOptions{"GpuAcc",
        ///                    {
        ///                        {"TuningFile", filename}
        ///                    }
        ///     });
        /// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        ///
        /// The following backend options are available:
        /// AllBackends:
        ///     "MemoryOptimizerStrategy" : string [strategynameString]
        ///         (Existing Memory Optimizer Strategies: ConstantMemoryStrategy)
        /// GpuAcc:
        ///     "TuningLevel" : int [0..3] (0=UseOnly(default) | 1=RapidTuning | 2=NormalTuning | 3=ExhaustiveTuning)
        ///     "TuningFile" : string [filenameString]
        ///     "KernelProfilingEnabled" : bool [true | false]
        std::vector<BackendOptions> m_BackendOptions;
    };

    static IRuntime* CreateRaw(const CreationOptions& options);
    static IRuntimePtr Create(const CreationOptions& options);
    static void Destroy(IRuntime* runtime);

    /// Loads a complete network into the IRuntime.
    /// @param [out] networkIdOut - Unique identifier for the network is returned in this reference.
    /// @param [in] network - Complete network to load into the IRuntime.
    /// The runtime takes ownership of the network once passed in.
    /// @return armnn::Status
    Status LoadNetwork(NetworkId& networkIdOut, IOptimizedNetworkPtr network);

    /// Loads a complete network into the IRuntime.
    /// @param [out] networkIdOut Unique identifier for the network is returned in this reference.
    /// @param [in] network Complete network to load into the IRuntime.
    /// @param [out] errorMessage Error message if there were any errors.
    /// The runtime takes ownership of the network once passed in.
    /// @return armnn::Status
    Status LoadNetwork(NetworkId& networkIdOut,
                       IOptimizedNetworkPtr network,
                       std::string& errorMessage);

    Status LoadNetwork(NetworkId& networkIdOut,
                       IOptimizedNetworkPtr network,
                       std::string& errorMessage,
                       const INetworkProperties& networkProperties);

    TensorInfo GetInputTensorInfo(NetworkId networkId, LayerBindingId layerId) const;
    TensorInfo GetOutputTensorInfo(NetworkId networkId, LayerBindingId layerId) const;

    /// ImportInputs separates the importing and mapping of InputTensors from network execution.
    /// Allowing for a set of InputTensors to be imported and mapped once, but used in execution many times.
    /// This function is not thread safe and must not be used while other threads are calling Execute().
    /// Only compatible with AsyncEnabled networks and aligned memory import.
    std::vector<ImportedInputId> ImportInputs(NetworkId networkId, const InputTensors& inputTensors,
                                              MemorySource forceImportMemorySource = MemorySource::Undefined);

    /// ImportOutputs separates the importing and mapping of OutputTensors from network execution.
    /// Allowing for a set of OutputTensors to be imported and mapped once, but used in execution many times.
    /// This function is not thread safe and must not be used while other threads are calling Execute().
    /// Only compatible with AsyncEnabled networks and aligned memory import.
    std::vector<ImportedOutputId> ImportOutputs(NetworkId networkId, const OutputTensors& outputTensors,
                                                MemorySource forceImportMemorySource = MemorySource::Undefined);

    /// Un-imports and deletes the imported InputTensor(s).
    /// This function is not thread safe and must not be used while other threads are calling Execute().
    /// Only compatible with AsyncEnabled networks.
    void ClearImportedInputs(NetworkId networkId, const std::vector<ImportedInputId> inputIds);

    /// Un-imports and deletes the imported OutputTensor(s).
    /// This function is not thread safe and must not be used while other threads are calling Execute().
    /// Only compatible with AsyncEnabled networks.
    void ClearImportedOutputs(NetworkId networkId, const std::vector<ImportedOutputId> outputIds);

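    // A minimal sketch of pre-importing input buffers and releasing them again (assumes a network
    // loaded with import enabled; 'inputTensors' is an InputTensors container prepared by the caller):
    //
    //     std::vector<ImportedInputId> importedIds =
    //         runtime->ImportInputs(networkId, inputTensors, MemorySource::Malloc);
    //     // ... run the network one or more times, passing importedIds as the pre-imported input ids ...
    //     runtime->ClearImportedInputs(networkId, importedIds);
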
    /// Evaluates a network using input in inputTensors and outputs filled into outputTensors.
    Status EnqueueWorkload(NetworkId networkId,
                           const InputTensors& inputTensors,
                           const OutputTensors& outputTensors,
                           std::vector<ImportedInputId> preImportedInputIds = {},
                           std::vector<ImportedOutputId> preImportedOutputIds = {});

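    // A minimal end-to-end sketch of loading and running a network synchronously (error handling
    // omitted; 'network' is an INetworkPtr built elsewhere, and the backend list, binding ids and
    // data buffers are illustrative only):
    //
    //     IRuntimePtr runtime = IRuntime::Create(IRuntime::CreationOptions());
    //     IOptimizedNetworkPtr optNet = Optimize(*network, {"CpuAcc", "CpuRef"}, runtime->GetDeviceSpec());
    //     NetworkId networkId;
    //     runtime->LoadNetwork(networkId, std::move(optNet));
    //
    //     InputTensors  inputs  = {{0, ConstTensor(runtime->GetInputTensorInfo(networkId, 0), inData.data())}};
    //     OutputTensors outputs = {{0, Tensor(runtime->GetOutputTensorInfo(networkId, 0), outData.data())}};
    //     runtime->EnqueueWorkload(networkId, inputs, outputs);
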
    /// This is an experimental function.
    /// Evaluates a network using input in inputTensors and outputs filled into outputTensors.
    /// This function performs a thread safe execution of the network. Returns once execution is complete.
    /// Will block until this and any other thread using the same workingMem object completes.
    Status Execute(IWorkingMemHandle& workingMemHandle,
                   const InputTensors& inputTensors,
                   const OutputTensors& outputTensors,
                   std::vector<ImportedInputId> preImportedInputs = {},
                   std::vector<ImportedOutputId> preImportedOutputs = {});

    /// Unloads a network from the IRuntime.
    /// At the moment this only removes the network from the m_Impl->m_Network.
    /// This might need more work in the future to be AndroidNN compliant.
    /// @param [in] networkId - Unique identifier for the network to be unloaded. Generated in LoadNetwork().
    /// @return armnn::Status
    Status UnloadNetwork(NetworkId networkId);

    const IDeviceSpec& GetDeviceSpec() const;

    /// Create a new unique WorkingMemHandle object. Create multiple handles if you wish to have
    /// overlapped Execution by calling this function from different threads.
    std::unique_ptr<IWorkingMemHandle> CreateWorkingMemHandle(NetworkId networkId);

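    // A minimal sketch of thread-safe execution with a working memory handle (assumes the network
    // was loaded with INetworkProperties::m_AsyncEnabled set to true; use one handle per thread):
    //
    //     std::unique_ptr<IWorkingMemHandle> workingMemHandle = runtime->CreateWorkingMemHandle(networkId);
    //     runtime->Execute(*workingMemHandle, inputs, outputs);
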
    /// Gets the profiler corresponding to the given network id.
    /// @param networkId The id of the network for which to get the profile.
    /// @return A pointer to the requested profiler, or nullptr if not found.
    const std::shared_ptr<IProfiler> GetProfiler(NetworkId networkId) const;

    /// Registers a callback function to debug layers performing custom computations on intermediate tensors.
    /// @param networkId The id of the network to register the callback.
    /// @param func callback function to pass to the debug layer.
    void RegisterDebugCallback(NetworkId networkId, const DebugCallbackFunction& func);

protected:
    IRuntime();
    IRuntime(const IRuntime::CreationOptions& options);
    ~IRuntime();

    std::unique_ptr<RuntimeImpl> pRuntimeImpl;
};


/// The following API is replaced by the backend options API.
using IGpuAccTunedParametersPtr = std::shared_ptr<IGpuAccTunedParameters>;

/// Manages a set of GpuAcc parameters which have been tuned for maximum performance.
/// Pass an instance of this object to the IRuntime::Create() method (via IRuntime::CreationOptions) to use it
/// for all GPU workload execution.
///
/// Can be created in two modes:
///     - In UseTunedParameters mode, the parameters stored in this object are used to execute GPU workloads.
///     - In UpdateTunedParameters mode, additionally, whenever a GPU workload is executed for the first time, the
///       optimum parameters will be found and stored in this object. WARNING - This tuning can be slow.
///
/// The parameters can be loaded from and saved to a file so that you can first run a slow initial read-write
/// execution, save the parameters for later and then run fast read-only executions using the optimised parameters.
class IGpuAccTunedParameters
{
public:
    enum class Mode
    {
        UseTunedParameters,
        UpdateTunedParameters
    };

    enum class TuningLevel
    {
        Rapid = 1,
        Normal = 2,
        Exhaustive = 3
    };

    /// Creates an IGpuAccTunedParameters object with the given mode and tuning level.
    /// @{
    static IGpuAccTunedParameters* CreateRaw(Mode mode, TuningLevel tunerMode);
    static IGpuAccTunedParametersPtr Create(Mode mode, TuningLevel tunerMode);
    /// @}
    static void Destroy(IGpuAccTunedParameters* params);

    /// Loads an existing set of tuned parameters from the given file.
    /// If there is an error loading the file, an armnn::Exception is thrown.
    virtual void Load(const char* filename) = 0;

    /// Saves the current set of tuned parameters to the given file.
    /// If there is an error saving to the file, an armnn::Exception is thrown.
    virtual void Save(const char* filename) const = 0;

protected:
    virtual ~IGpuAccTunedParameters() {};
};

} // namespace armnn