//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "ArmnnDriverImpl.hpp"
#include "../ArmnnPreparedModel_1_2.hpp"
#include "../ModelToINetworkConverter.hpp"
#include "../SystemPropertiesUtils.hpp"

#include <log/log.h>

namespace
{

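// Android system property keys used by getCapabilities_1_2() below. Each value is read with
// ParseSystemProperty() and falls back to a default when the property is not set on the device.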
const char *g_RelaxedFloat32toFloat16PerformanceExecTime = "ArmNN.relaxedFloat32toFloat16Performance.execTime";
const char *g_RelaxedFloat32toFloat16PerformancePowerUsage = "ArmNN.relaxedFloat32toFloat16Performance.powerUsage";

const char *g_OperandTypeTensorFloat32PerformanceExecTime = "Armnn.operandTypeTensorFloat32Performance.execTime";
const char *g_OperandTypeTensorFloat32PerformancePowerUsage = "Armnn.operandTypeTensorFloat32Performance.powerUsage";

const char *g_OperandTypeFloat32PerformanceExecTime = "Armnn.operandTypeFloat32Performance.execTime";
const char *g_OperandTypeFloat32PerformancePowerUsage = "Armnn.operandTypeFloat32Performance.powerUsage";

const char *g_OperandTypeTensorFloat16PerformanceExecTime = "Armnn.operandTypeTensorFloat16Performance.execTime";
const char *g_OperandTypeTensorFloat16PerformancePowerUsage = "Armnn.operandTypeTensorFloat16Performance.powerUsage";

const char *g_OperandTypeFloat16PerformanceExecTime = "Armnn.operandTypeFloat16Performance.execTime";
const char *g_OperandTypeFloat16PerformancePowerUsage = "Armnn.operandTypeFloat16Performance.powerUsage";

const char *g_OperandTypeTensorQuant8AsymmPerformanceExecTime =
        "Armnn.operandTypeTensorQuant8AsymmPerformance.execTime";
const char *g_OperandTypeTensorQuant8AsymmPerformancePowerUsage =
        "Armnn.operandTypeTensorQuant8AsymmPerformance.powerUsage";

const char *g_OperandTypeTensorQuant16SymmPerformanceExecTime =
        "Armnn.operandTypeTensorQuant16SymmPerformance.execTime";
const char *g_OperandTypeTensorQuant16SymmPerformancePowerUsage =
        "Armnn.operandTypeTensorQuant16SymmPerformance.powerUsage";

const char *g_OperandTypeTensorQuant8SymmPerformanceExecTime =
        "Armnn.operandTypeTensorQuant8SymmPerformance.execTime";
const char *g_OperandTypeTensorQuant8SymmPerformancePowerUsage =
        "Armnn.operandTypeTensorQuant8SymmPerformance.powerUsage";

const char *g_OperandTypeTensorQuant8SymmPerChannelPerformanceExecTime =
        "Armnn.operandTypeTensorQuant8SymmPerChannelPerformance.execTime";
const char *g_OperandTypeTensorQuant8SymmPerChannelPerformancePowerUsage =
        "Armnn.operandTypeTensorQuant8SymmPerChannelPerformance.powerUsage";


const char *g_OperandTypeTensorInt32PerformanceExecTime = "Armnn.operandTypeTensorInt32Performance.execTime";
const char *g_OperandTypeTensorInt32PerformancePowerUsage = "Armnn.operandTypeTensorInt32Performance.powerUsage";

const char *g_OperandTypeInt32PerformanceExecTime = "Armnn.operandTypeInt32Performance.execTime";
const char *g_OperandTypeInt32PerformancePowerUsage = "Armnn.operandTypeInt32Performance.powerUsage";


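// Forwards the prepared model (or an error) to the client's callback and checks the HIDL
// transaction status; an unchecked failed callback would otherwise take the whole service down.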
void NotifyCallbackAndCheck(const android::sp<V1_2::IPreparedModelCallback>& callback,
                            V1_0::ErrorStatus errorStatus,
                            const android::sp<V1_2::IPreparedModel>& preparedModelPtr)
{
    Return<void> returned = callback->notify_1_2(errorStatus, preparedModelPtr);
    // This check is required; if the callback fails and it isn't checked, it will bring down the service.
    if (!returned.isOk())
    {
        ALOGE("ArmnnDriverImpl::prepareModel: hidl callback failed to return properly: %s ",
              returned.description().c_str());
    }
}

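// Logs the failure, notifies the callback with the supplied error code, and returns that code so
// callers can simply 'return FailPrepareModel(...)'.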
Return<V1_0::ErrorStatus> FailPrepareModel(V1_0::ErrorStatus error,
                                           const std::string& message,
                                           const android::sp<V1_2::IPreparedModelCallback>& callback)
{
    ALOGW("ArmnnDriverImpl::prepareModel: %s", message.c_str());
    NotifyCallbackAndCheck(callback, error, nullptr);
    return error;
}

} // anonymous namespace

namespace armnn_driver
{
namespace hal_1_2
{

Return<V1_0::ErrorStatus> ArmnnDriverImpl::prepareArmnnModel_1_2(
        const armnn::IRuntimePtr& runtime,
        const armnn::IGpuAccTunedParametersPtr& clTunedParameters,
        const DriverOptions& options,
        const V1_2::Model& model,
        const android::sp<V1_2::IPreparedModelCallback>& cb,
        bool float32ToFloat16)
{
    ALOGV("ArmnnDriverImpl::prepareArmnnModel_1_2()");

    if (cb.get() == nullptr)
    {
        ALOGW("ArmnnDriverImpl::prepareModel: Invalid callback passed to prepareModel");
        return V1_0::ErrorStatus::INVALID_ARGUMENT;
    }

    if (!runtime)
    {
        return FailPrepareModel(V1_0::ErrorStatus::DEVICE_UNAVAILABLE, "Device unavailable", cb);
    }

    if (!android::nn::validateModel(model))
    {
        return FailPrepareModel(V1_0::ErrorStatus::INVALID_ARGUMENT, "Invalid model passed as input", cb);
    }

    // Deliberately ignore any unsupported operations requested by the options -
    // at this point we're being asked to prepare a model that we've already declared support for,
    // and the operation indices may be different to those in getSupportedOperations anyway.
    std::set<unsigned int> unsupportedOperations;
    ModelToINetworkConverter<HalPolicy> modelConverter(options.GetBackends(),
                                                       model,
                                                       unsupportedOperations);

    if (modelConverter.GetConversionResult() != ConversionResult::Success)
    {
        FailPrepareModel(V1_0::ErrorStatus::GENERAL_FAILURE, "ModelToINetworkConverter failed", cb);
        return V1_0::ErrorStatus::NONE;
    }

    // Serialize the network graph to a .armnn file if an output directory
    // has been specified in the driver's arguments.
    auto serializedNetworkFileName =
        SerializeNetwork(*modelConverter.GetINetwork(), options.GetRequestInputsAndOutputsDumpDir());

    // Optimize the network
    armnn::IOptimizedNetworkPtr optNet(nullptr, nullptr);
    armnn::OptimizerOptions OptOptions;
    OptOptions.m_ReduceFp32ToFp16 = float32ToFloat16;

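    // Backend-specific options forwarded from the DriverOptions: GpuAcc picks up the fast-math,
    // cached-network and MLGO tuning settings, CpuAcc picks up fast-math and its thread count.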
    armnn::BackendOptions gpuAcc("GpuAcc",
    {
        { "FastMathEnabled", options.IsFastMathEnabled() },
        { "SaveCachedNetwork", options.SaveCachedNetwork() },
        { "CachedNetworkFilePath", options.GetCachedNetworkFilePath() },
        { "MLGOTuningFilePath", options.GetClMLGOTunedParametersFile() }
    });

    armnn::BackendOptions cpuAcc("CpuAcc",
    {
        { "FastMathEnabled", options.IsFastMathEnabled() },
        { "NumberOfThreads", options.GetNumberOfThreads() }
    });
    OptOptions.m_ModelOptions.push_back(gpuAcc);
    OptOptions.m_ModelOptions.push_back(cpuAcc);

    std::vector<std::string> errMessages;
    try
    {
        optNet = armnn::Optimize(*modelConverter.GetINetwork(),
                                 options.GetBackends(),
                                 runtime->GetDeviceSpec(),
                                 OptOptions,
                                 errMessages);
    }
    catch (std::exception& e)
    {
        std::stringstream message;
        message << "Exception (" << e.what() << ") caught from optimize.";
        FailPrepareModel(V1_0::ErrorStatus::GENERAL_FAILURE, message.str(), cb);
        return V1_0::ErrorStatus::NONE;
    }

    // Check that the optimized network is valid.
    if (!optNet)
    {
        std::stringstream message;
        message << "Invalid optimized network";
        for (const std::string& msg : errMessages)
        {
            message << "\n" << msg;
        }
        FailPrepareModel(V1_0::ErrorStatus::GENERAL_FAILURE, message.str(), cb);
        return V1_0::ErrorStatus::NONE;
    }

    // Export the optimized network graph to a dot file if an output dump directory
    // has been specified in the driver's arguments.
    std::string dotGraphFileName = ExportNetworkGraphToDotFile(*optNet,
                                                               options.GetRequestInputsAndOutputsDumpDir());

    // Load it into the runtime.
    armnn::NetworkId netId = 0;
    std::string msg;
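    // Asynchronous execution is driven by the driver options; MemorySource::Undefined here means the
    // runtime is not asked to import or export the input/output buffers.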
    armnn::INetworkProperties networkProperties(options.isAsyncModelExecutionEnabled(),
                                                MemorySource::Undefined,
                                                MemorySource::Undefined);
    try
    {
        if (runtime->LoadNetwork(netId, move(optNet), msg, networkProperties) != armnn::Status::Success)
        {
            return FailPrepareModel(V1_0::ErrorStatus::GENERAL_FAILURE, "Network could not be loaded", cb);
        }
    }
    catch (std::exception& e)
    {
        std::stringstream message;
        message << "Exception (" << e.what() << ") caught from LoadNetwork.";
        FailPrepareModel(V1_0::ErrorStatus::GENERAL_FAILURE, message.str(), cb);
        return V1_0::ErrorStatus::NONE;
    }

    // Now that we have a networkId for the graph, rename the exported files to use it
    // so that we can associate the graph file with the exported input/output tensor files.
    RenameExportedFiles(serializedNetworkFileName,
                        dotGraphFileName,
                        options.GetRequestInputsAndOutputsDumpDir(),
                        netId);

    std::unique_ptr<ArmnnPreparedModel_1_2<hal_1_2::HalPolicy>> preparedModel(
            new ArmnnPreparedModel_1_2<hal_1_2::HalPolicy>(
                    netId,
                    runtime.get(),
                    model,
                    options.GetRequestInputsAndOutputsDumpDir(),
                    options.IsGpuProfilingEnabled(),
                    options.isAsyncModelExecutionEnabled(),
                    options.getNoOfArmnnThreads()));

    // Run a single 'dummy' inference of the model. This means that CL kernels will get compiled (and tuned if
    // this is enabled) before the first 'real' inference, which removes the overhead from that first inference.
    if (!preparedModel->ExecuteWithDummyInputs())
    {
        return FailPrepareModel(V1_0::ErrorStatus::GENERAL_FAILURE, "Network could not be executed", cb);
    }

    if (clTunedParameters &&
        options.GetClTunedParametersMode() == armnn::IGpuAccTunedParameters::Mode::UpdateTunedParameters)
    {
        // Now that we've done one inference the CL kernel parameters will have been tuned, so save the updated file.
        try
        {
            clTunedParameters->Save(options.GetClTunedParametersFile().c_str());
        }
        catch (std::exception& error)
        {
            ALOGE("ArmnnDriverImpl::prepareModel: Failed to save CL tuned parameters file '%s': %s",
                  options.GetClTunedParametersFile().c_str(), error.what());
        }
    }

    NotifyCallbackAndCheck(cb, V1_0::ErrorStatus::NONE, preparedModel.release());

    return V1_0::ErrorStatus::NONE;
}

Return<void> ArmnnDriverImpl::getCapabilities_1_2(const armnn::IRuntimePtr& runtime,
                                                  V1_2::IDevice::getCapabilities_1_2_cb cb)
{
    ALOGV("hal_1_2::ArmnnDriverImpl::getCapabilities()");

    V1_2::Capabilities capabilities;

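    // Fallback performance figure used whenever the corresponding system property is not set.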
    float defaultValue = .1f;

    if (runtime)
    {
        capabilities.relaxedFloat32toFloat16PerformanceScalar.execTime =
                ParseSystemProperty(g_RelaxedFloat32toFloat16PerformanceExecTime, defaultValue);

        capabilities.relaxedFloat32toFloat16PerformanceScalar.powerUsage =
                ParseSystemProperty(g_RelaxedFloat32toFloat16PerformancePowerUsage, defaultValue);

        capabilities.relaxedFloat32toFloat16PerformanceTensor.execTime =
                ParseSystemProperty(g_RelaxedFloat32toFloat16PerformanceExecTime, defaultValue);

        capabilities.relaxedFloat32toFloat16PerformanceTensor.powerUsage =
                ParseSystemProperty(g_RelaxedFloat32toFloat16PerformancePowerUsage, defaultValue);

        // Set the base value for all operand types
        #if defined(ARMNN_ANDROID_R) || defined(ARMNN_ANDROID_S)
        capabilities.operandPerformance = nonExtensionOperandPerformance<HalVersion::V1_2>({FLT_MAX, FLT_MAX});
        #else
        capabilities.operandPerformance = nonExtensionOperandPerformance({FLT_MAX, FLT_MAX});
        #endif

        // Load supported operand types
        update(&capabilities.operandPerformance, V1_2::OperandType::TENSOR_FLOAT32,
               {
                   .execTime = ParseSystemProperty(g_OperandTypeTensorFloat32PerformanceExecTime, defaultValue),
                   .powerUsage = ParseSystemProperty(g_OperandTypeTensorFloat32PerformancePowerUsage, defaultValue)
               });

        update(&capabilities.operandPerformance, V1_2::OperandType::FLOAT32,
               {
                   .execTime = ParseSystemProperty(g_OperandTypeFloat32PerformanceExecTime, defaultValue),
                   .powerUsage = ParseSystemProperty(g_OperandTypeFloat32PerformancePowerUsage, defaultValue)
               });

        update(&capabilities.operandPerformance, V1_2::OperandType::TENSOR_FLOAT16,
               {
                   .execTime = ParseSystemProperty(g_OperandTypeTensorFloat16PerformanceExecTime, defaultValue),
                   .powerUsage = ParseSystemProperty(g_OperandTypeTensorFloat16PerformancePowerUsage, defaultValue)
               });

        update(&capabilities.operandPerformance, V1_2::OperandType::FLOAT16,
               {
                   .execTime = ParseSystemProperty(g_OperandTypeFloat16PerformanceExecTime, defaultValue),
                   .powerUsage = ParseSystemProperty(g_OperandTypeFloat16PerformancePowerUsage, defaultValue)
               });

        update(&capabilities.operandPerformance, V1_2::OperandType::TENSOR_QUANT8_ASYMM,
               {
                   .execTime = ParseSystemProperty(g_OperandTypeTensorQuant8AsymmPerformanceExecTime, defaultValue),
                   .powerUsage = ParseSystemProperty(g_OperandTypeTensorQuant8AsymmPerformancePowerUsage, defaultValue)
               });

        update(&capabilities.operandPerformance, V1_2::OperandType::TENSOR_QUANT8_SYMM,
               {
                   .execTime = ParseSystemProperty(g_OperandTypeTensorQuant8SymmPerformanceExecTime, defaultValue),
                   .powerUsage = ParseSystemProperty(g_OperandTypeTensorQuant8SymmPerformancePowerUsage, defaultValue)
               });

        update(&capabilities.operandPerformance, V1_2::OperandType::TENSOR_QUANT16_SYMM,
               {
                   .execTime = ParseSystemProperty(g_OperandTypeTensorQuant16SymmPerformanceExecTime, defaultValue),
                   .powerUsage = ParseSystemProperty(g_OperandTypeTensorQuant16SymmPerformancePowerUsage, defaultValue)
               });

        update(&capabilities.operandPerformance, V1_2::OperandType::TENSOR_QUANT8_SYMM_PER_CHANNEL,
               {
                   .execTime =
                       ParseSystemProperty(g_OperandTypeTensorQuant8SymmPerChannelPerformanceExecTime, defaultValue),
                   .powerUsage =
                       ParseSystemProperty(g_OperandTypeTensorQuant8SymmPerChannelPerformancePowerUsage, defaultValue)
               });

        update(&capabilities.operandPerformance, V1_2::OperandType::TENSOR_INT32,
               {
                   .execTime = ParseSystemProperty(g_OperandTypeTensorInt32PerformanceExecTime, defaultValue),
                   .powerUsage = ParseSystemProperty(g_OperandTypeTensorInt32PerformancePowerUsage, defaultValue)
               });

        update(&capabilities.operandPerformance, V1_2::OperandType::INT32,
               {
                   .execTime = ParseSystemProperty(g_OperandTypeInt32PerformanceExecTime, defaultValue),
                   .powerUsage = ParseSystemProperty(g_OperandTypeInt32PerformancePowerUsage, defaultValue)
               });

        cb(V1_0::ErrorStatus::NONE, capabilities);
    }
    else
    {
        capabilities.relaxedFloat32toFloat16PerformanceScalar.execTime = 0;
        capabilities.relaxedFloat32toFloat16PerformanceScalar.powerUsage = 0;
        capabilities.relaxedFloat32toFloat16PerformanceTensor.execTime = 0;
        capabilities.relaxedFloat32toFloat16PerformanceTensor.powerUsage = 0;

        // Set the base value for all operand types
        #if defined(ARMNN_ANDROID_R) || defined(ARMNN_ANDROID_S)
        capabilities.operandPerformance = nonExtensionOperandPerformance<HalVersion::V1_2>({0.f, 0.0f});
        #else
        capabilities.operandPerformance = nonExtensionOperandPerformance({0.f, 0.0f});
        #endif

        cb(V1_0::ErrorStatus::DEVICE_UNAVAILABLE, capabilities);
    }

    return Void();
}

} // namespace hal_1_2
} // namespace armnn_driver