blob: ec0eaf90f89de49ad8af43d252976914d4ccd63a [file] [log] [blame]
Francis Murtaghbee4bc92019-06-18 12:30:37 +01001//
2// Copyright © 2017 Arm Ltd. All rights reserved.
3// SPDX-License-Identifier: MIT
4//
5#include <armnn/ArmNN.hpp>
6#include <armnn/TypesUtils.hpp>
alered0172b41562020-05-07 14:58:29 +01007#include <armnn/utility/Timer.hpp>
Francis Murtaghbee4bc92019-06-18 12:30:37 +01008
9#if defined(ARMNN_SERIALIZER)
10#include "armnnDeserializer/IDeserializer.hpp"
11#endif
12#if defined(ARMNN_CAFFE_PARSER)
13#include "armnnCaffeParser/ICaffeParser.hpp"
14#endif
15#if defined(ARMNN_TF_PARSER)
16#include "armnnTfParser/ITfParser.hpp"
17#endif
18#if defined(ARMNN_TF_LITE_PARSER)
19#include "armnnTfLiteParser/ITfLiteParser.hpp"
20#endif
21#if defined(ARMNN_ONNX_PARSER)
22#include "armnnOnnxParser/IOnnxParser.hpp"
23#endif
24#include "CsvReader.hpp"
25#include "../InferenceTest.hpp"
26
Francis Murtaghbee4bc92019-06-18 12:30:37 +010027#include <Profiling.hpp>
Aron Virginas-Tarc82c8732019-10-24 17:07:43 +010028#include <ResolveType.hpp>
Francis Murtaghbee4bc92019-06-18 12:30:37 +010029
Francis Murtaghbee4bc92019-06-18 12:30:37 +010030#include <boost/program_options.hpp>
31#include <boost/variant.hpp>
32
33#include <iostream>
34#include <fstream>
35#include <functional>
36#include <future>
37#include <algorithm>
38#include <iterator>
39
40namespace
41{
42
43// Configure boost::program_options for command-line parsing and validation.
44namespace po = boost::program_options;
45
46template<typename T, typename TParseElementFunc>
47std::vector<T> ParseArrayImpl(std::istream& stream, TParseElementFunc parseElementFunc, const char * chars = "\t ,:")
48{
49 std::vector<T> result;
50 // Processes line-by-line.
51 std::string line;
52 while (std::getline(stream, line))
53 {
David Monahana8837bf2020-04-16 10:01:56 +010054 std::vector<std::string> tokens = armnn::stringUtils::StringTokenizer(line, chars);
Francis Murtaghbee4bc92019-06-18 12:30:37 +010055 for (const std::string& token : tokens)
56 {
57 if (!token.empty()) // See https://stackoverflow.com/questions/10437406/
58 {
59 try
60 {
61 result.push_back(parseElementFunc(token));
62 }
63 catch (const std::exception&)
64 {
Derek Lamberti08446972019-11-26 16:38:31 +000065 ARMNN_LOG(error) << "'" << token << "' is not a valid number. It has been ignored.";
Francis Murtaghbee4bc92019-06-18 12:30:37 +010066 }
67 }
68 }
69 }
70
71 return result;
72}
73
74bool CheckOption(const po::variables_map& vm,
75 const char* option)
76{
77 // Check that the given option is valid.
78 if (option == nullptr)
79 {
80 return false;
81 }
82
83 // Check whether 'option' is provided.
84 return vm.find(option) != vm.end();
85}
86
87void CheckOptionDependency(const po::variables_map& vm,
88 const char* option,
89 const char* required)
90{
91 // Check that the given options are valid.
92 if (option == nullptr || required == nullptr)
93 {
94 throw po::error("Invalid option to check dependency for");
95 }
96
97 // Check that if 'option' is provided, 'required' is also provided.
98 if (CheckOption(vm, option) && !vm[option].defaulted())
99 {
100 if (CheckOption(vm, required) == 0 || vm[required].defaulted())
101 {
102 throw po::error(std::string("Option '") + option + "' requires option '" + required + "'.");
103 }
104 }
105}
106
107void CheckOptionDependencies(const po::variables_map& vm)
108{
109 CheckOptionDependency(vm, "model-path", "model-format");
110 CheckOptionDependency(vm, "model-path", "input-name");
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100111 CheckOptionDependency(vm, "model-path", "output-name");
112 CheckOptionDependency(vm, "input-tensor-shape", "model-path");
113}
114
// Primary templates for parsing tensor data from a text stream. Each supported
// armnn::DataType has an explicit specialisation below; the return type is
// 'auto' so each specialisation can return a vector of its native element type.

// Non-quantized variant: tokens are converted directly to the target type.
template<armnn::DataType NonQuantizedType>
auto ParseDataArray(std::istream & stream);

// Quantized variant: tokens are parsed as floats and quantized with the given
// scale and zero-point offset before being stored.
template<armnn::DataType QuantizedType>
auto ParseDataArray(std::istream& stream,
                    const float& quantizationScale,
                    const int32_t& quantizationOffset);
122
123template<>
124auto ParseDataArray<armnn::DataType::Float32>(std::istream & stream)
125{
126 return ParseArrayImpl<float>(stream, [](const std::string& s) { return std::stof(s); });
127}
128
129template<>
130auto ParseDataArray<armnn::DataType::Signed32>(std::istream & stream)
131{
132 return ParseArrayImpl<int>(stream, [](const std::string & s) { return std::stoi(s); });
133}
134
135template<>
Derek Lambertif90c56d2020-01-10 17:14:08 +0000136auto ParseDataArray<armnn::DataType::QAsymmU8>(std::istream& stream)
Narumol Prangnawarat610256f2019-06-26 15:10:46 +0100137{
138 return ParseArrayImpl<uint8_t>(stream,
139 [](const std::string& s) { return boost::numeric_cast<uint8_t>(std::stoi(s)); });
140}
141
142template<>
Derek Lambertif90c56d2020-01-10 17:14:08 +0000143auto ParseDataArray<armnn::DataType::QAsymmU8>(std::istream& stream,
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100144 const float& quantizationScale,
145 const int32_t& quantizationOffset)
146{
147 return ParseArrayImpl<uint8_t>(stream,
148 [&quantizationScale, &quantizationOffset](const std::string & s)
149 {
150 return boost::numeric_cast<uint8_t>(
Rob Hughes93667b12019-09-23 16:24:05 +0100151 armnn::Quantize<uint8_t>(std::stof(s),
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100152 quantizationScale,
153 quantizationOffset));
154 });
155}
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100156std::vector<unsigned int> ParseArray(std::istream& stream)
157{
158 return ParseArrayImpl<unsigned int>(stream,
159 [](const std::string& s) { return boost::numeric_cast<unsigned int>(std::stoi(s)); });
160}
161
162std::vector<std::string> ParseStringList(const std::string & inputString, const char * delimiter)
163{
164 std::stringstream stream(inputString);
David Monahana8837bf2020-04-16 10:01:56 +0100165 return ParseArrayImpl<std::string>(stream, [](const std::string& s) {
166 return armnn::stringUtils::StringTrimCopy(s); }, delimiter);
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100167}
168
169void RemoveDuplicateDevices(std::vector<armnn::BackendId>& computeDevices)
170{
171 // Mark the duplicate devices as 'Undefined'.
172 for (auto i = computeDevices.begin(); i != computeDevices.end(); ++i)
173 {
174 for (auto j = std::next(i); j != computeDevices.end(); ++j)
175 {
176 if (*j == *i)
177 {
178 *j = armnn::Compute::Undefined;
179 }
180 }
181 }
182
183 // Remove 'Undefined' devices.
184 computeDevices.erase(std::remove(computeDevices.begin(), computeDevices.end(), armnn::Compute::Undefined),
185 computeDevices.end());
186}
187
// boost::static_visitor that prints one output tensor's values to stdout and,
// when a file path was supplied, writes them to that file as well. uint8
// tensors may optionally be dequantized using the binding's scale/offset.
struct TensorPrinter : public boost::static_visitor<>
{
    // @param binding          Output binding name printed before the values.
    // @param info             TensorInfo providing quantization scale/offset.
    // @param outputTensorFile Destination file path ("" disables file output).
    // @param dequantizeOutput When true, uint8 values are dequantized to float.
    TensorPrinter(const std::string& binding,
                  const armnn::TensorInfo& info,
                  const std::string& outputTensorFile,
                  bool dequantizeOutput)
        : m_OutputBinding(binding)
        , m_Scale(info.GetQuantizationScale())
        , m_Offset(info.GetQuantizationOffset())
        , m_OutputTensorFile(outputTensorFile)
        , m_DequantizeOutput(dequantizeOutput)
    {}

    // Float tensors: print the raw values.
    void operator()(const std::vector<float>& values)
    {
        ForEachValue(values, [](float value)
        {
            printf("%f ", value);
        });
        WriteToFile(values);
    }

    // uint8 tensors: either dequantize with the stored scale/offset, or fall
    // back to printing them as plain ints.
    void operator()(const std::vector<uint8_t>& values)
    {
        if(m_DequantizeOutput)
        {
            auto& scale = m_Scale;
            auto& offset = m_Offset;
            std::vector<float> dequantizedValues;
            ForEachValue(values, [&scale, &offset, &dequantizedValues](uint8_t value)
            {
                auto dequantizedValue = armnn::Dequantize(value, scale, offset);
                printf("%f ", dequantizedValue);
                dequantizedValues.push_back(dequantizedValue);
            });
            // File output gets the dequantized floats, mirroring stdout.
            WriteToFile(dequantizedValues);
        }
        else
        {
            const std::vector<int> intValues(values.begin(), values.end());
            operator()(intValues);
        }
    }

    // Int tensors: print the raw values.
    void operator()(const std::vector<int>& values)
    {
        ForEachValue(values, [](int value)
        {
            printf("%d ", value);
        });
        WriteToFile(values);
    }

private:
    // Prints the binding name, then applies 'delegate' to every element,
    // finishing with a newline.
    template<typename Container, typename Delegate>
    void ForEachValue(const Container& c, Delegate delegate)
    {
        std::cout << m_OutputBinding << ": ";
        for (const auto& value : c)
        {
            delegate(value);
        }
        printf("\n");
    }

    // Writes the values to m_OutputTensorFile (truncating any existing file),
    // if a path was supplied. Failure to open the file is logged, not fatal.
    template<typename T>
    void WriteToFile(const std::vector<T>& values)
    {
        if (!m_OutputTensorFile.empty())
        {
            std::ofstream outputTensorFile;
            outputTensorFile.open(m_OutputTensorFile, std::ofstream::out | std::ofstream::trunc);
            if (outputTensorFile.is_open())
            {
                outputTensorFile << m_OutputBinding << ": ";
                std::copy(values.begin(), values.end(), std::ostream_iterator<T>(outputTensorFile, " "));
            }
            else
            {
                ARMNN_LOG(info) << "Output Tensor File: " << m_OutputTensorFile << " could not be opened!";
            }
            outputTensorFile.close();
        }
    }

    std::string m_OutputBinding;       // Output binding name printed as a prefix.
    float m_Scale=0.0f;                // Quantization scale from the output TensorInfo.
    int m_Offset=0;                    // Quantization zero-point from the output TensorInfo.
    std::string m_OutputTensorFile;    // Optional destination file ("" disables writing).
    bool m_DequantizeOutput = false;   // Whether uint8 outputs are dequantized before printing.
};
279
280
Aron Virginas-Tarc82c8732019-10-24 17:07:43 +0100281
282template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
283std::vector<T> GenerateDummyTensorData(unsigned int numElements)
284{
285 return std::vector<T>(numElements, static_cast<T>(0));
286}
287
288using TContainer = boost::variant<std::vector<float>, std::vector<int>, std::vector<unsigned char>>;
289using QuantizationParams = std::pair<float, int32_t>;
290
// Fills 'tensorData' with input values for one tensor binding.
//
// Values are read from 'dataFile' when it is set and non-empty; otherwise a
// zero-filled dummy buffer of 'numElements' is generated. 'dataTypeStr'
// selects the element type ("float", "int" or "qasymm8"). When 'qParams' is
// set and the type is "float", file values are parsed as floats and quantized
// to QAsymmU8 with the given (scale, offset) pair.
//
// @throws armnn::Exception for an unsupported 'dataTypeStr'.
void PopulateTensorWithData(TContainer& tensorData,
                            unsigned int numElements,
                            const std::string& dataTypeStr,
                            const armnn::Optional<QuantizationParams>& qParams,
                            const armnn::Optional<std::string>& dataFile)
{
    const bool readFromFile = dataFile.has_value() && !dataFile.value().empty();
    const bool quantizeData = qParams.has_value();

    std::ifstream inputTensorFile;
    if (readFromFile)
    {
        inputTensorFile = std::ifstream(dataFile.value());
    }

    if (dataTypeStr.compare("float") == 0)
    {
        if (quantizeData)
        {
            const float qScale = qParams.value().first;
            const int qOffset = qParams.value().second;

            // "float" input with quantization requested: the parsed floats are
            // stored as quantized uint8 values.
            tensorData = readFromFile ?
                ParseDataArray<armnn::DataType::QAsymmU8>(inputTensorFile, qScale, qOffset) :
                GenerateDummyTensorData<armnn::DataType::QAsymmU8>(numElements);
        }
        else
        {
            tensorData = readFromFile ?
                ParseDataArray<armnn::DataType::Float32>(inputTensorFile) :
                GenerateDummyTensorData<armnn::DataType::Float32>(numElements);
        }
    }
    else if (dataTypeStr.compare("int") == 0)
    {
        tensorData = readFromFile ?
            ParseDataArray<armnn::DataType::Signed32>(inputTensorFile) :
            GenerateDummyTensorData<armnn::DataType::Signed32>(numElements);
    }
    else if (dataTypeStr.compare("qasymm8") == 0)
    {
        tensorData = readFromFile ?
            ParseDataArray<armnn::DataType::QAsymmU8>(inputTensorFile) :
            GenerateDummyTensorData<armnn::DataType::QAsymmU8>(numElements);
    }
    else
    {
        std::string errorMessage = "Unsupported tensor data type " + dataTypeStr;
        ARMNN_LOG(fatal) << errorMessage;

        // Close before throwing so the handle is not leaked past the throw.
        inputTensorFile.close();
        throw armnn::Exception(errorMessage);
    }

    inputTensorFile.close();
}
347
348} // anonymous namespace
349
350bool generateTensorData = true;
351
// Aggregates every command-line-controlled setting needed to run a network.
// Populated by RunTest() and consumed by MainImpl().
struct ExecuteNetworkParams
{
    using TensorShapePtr = std::unique_ptr<armnn::TensorShape>;

    const char* m_ModelPath;                         // Path to the model file (points into RunTest's local string).
    bool m_IsModelBinary;                            // True for binary model formats, false for text.
    std::vector<armnn::BackendId> m_ComputeDevices;  // Backends to run on, in preference order.
    std::string m_DynamicBackendsPath;               // Path used when loading dynamic backends.
    std::vector<string> m_InputNames;                // Input binding names.
    std::vector<TensorShapePtr> m_InputTensorShapes; // Optional per-input shape overrides.
    std::vector<string> m_InputTensorDataFilePaths;  // Per-input data files (empty => dummy data is generated).
    std::vector<string> m_InputTypes;                // Per-input data types ("float", "int", "qasymm8").
    bool m_QuantizeInput;                            // Quantize float input data to qasymm8 on load.
    std::vector<string> m_OutputTypes;               // Per-output data types.
    std::vector<string> m_OutputNames;               // Output binding names.
    std::vector<string> m_OutputTensorFiles;         // Optional per-output files to write results to.
    bool m_DequantizeOutput;                         // Dequantize qasymm8 outputs before printing.
    bool m_EnableProfiling;                          // Passed to InferenceModel to enable profiling.
    bool m_EnableFp16TurboMode;                      // FP16 "turbo" optimization flag forwarded to the model.
    bool m_EnableBf16TurboMode;                      // BF16 "turbo" optimization flag forwarded to the model.
    double m_ThresholdTime;                          // Inference-time threshold in ms (0.0 => disabled).
    bool m_PrintIntermediate;                        // Print intermediate layer outputs.
    size_t m_SubgraphId;                             // Subgraph to execute (parser-dependent meaning).
    bool m_EnableLayerDetails = false;               // Visualize the post-optimization model.
    bool m_GenerateTensorData;                       // True when inputs are dummy-generated (no data files).
    bool m_ParseUnsupported = false;                 // Continue parsing past unsupported layers.
};
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100379
// Loads the model described by 'params' with the given parser, populates the
// input tensors, runs inference 'iterations' times and prints (and optionally
// writes to file) the output tensors after each run.
//
// @param params     All run settings collected from the command line.
// @param runtime    Optional pre-created runtime shared across runs.
// @param iterations Number of inference runs to execute.
// @return EXIT_SUCCESS on success, EXIT_FAILURE on any error.
template<typename TParser, typename TDataType>
int MainImpl(const ExecuteNetworkParams& params,
             const std::shared_ptr<armnn::IRuntime>& runtime = nullptr,
             size_t iterations = 1)
{
    using TContainer = boost::variant<std::vector<float>, std::vector<int>, std::vector<unsigned char>>;

    std::vector<TContainer> inputDataContainers;

    try
    {
        // Creates an InferenceModel, which will parse the model and load it into an IRuntime.
        typename InferenceModel<TParser, TDataType>::Params inferenceModelParams;
        inferenceModelParams.m_ModelPath = params.m_ModelPath;
        inferenceModelParams.m_IsModelBinary = params.m_IsModelBinary;
        inferenceModelParams.m_ComputeDevices = params.m_ComputeDevices;
        inferenceModelParams.m_DynamicBackendsPath = params.m_DynamicBackendsPath;
        inferenceModelParams.m_PrintIntermediateLayers = params.m_PrintIntermediate;
        inferenceModelParams.m_VisualizePostOptimizationModel = params.m_EnableLayerDetails;
        inferenceModelParams.m_ParseUnsupported = params.m_ParseUnsupported;

        for(const std::string& inputName: params.m_InputNames)
        {
            inferenceModelParams.m_InputBindings.push_back(inputName);
        }

        for(unsigned int i = 0; i < params.m_InputTensorShapes.size(); ++i)
        {
            inferenceModelParams.m_InputShapes.push_back(*params.m_InputTensorShapes[i]);
        }

        for(const std::string& outputName: params.m_OutputNames)
        {
            inferenceModelParams.m_OutputBindings.push_back(outputName);
        }

        inferenceModelParams.m_SubgraphId = params.m_SubgraphId;
        inferenceModelParams.m_EnableFp16TurboMode = params.m_EnableFp16TurboMode;
        inferenceModelParams.m_EnableBf16TurboMode = params.m_EnableBf16TurboMode;

        InferenceModel<TParser, TDataType> model(inferenceModelParams,
                                                 params.m_EnableProfiling,
                                                 params.m_DynamicBackendsPath,
                                                 runtime);

        // Build one input container per input binding, either from file or
        // from generated dummy data.
        const size_t numInputs = inferenceModelParams.m_InputBindings.size();
        for(unsigned int i = 0; i < numInputs; ++i)
        {
            armnn::Optional<QuantizationParams> qParams = params.m_QuantizeInput ?
                armnn::MakeOptional<QuantizationParams>(model.GetInputQuantizationParams()) :
                armnn::EmptyOptional();

            armnn::Optional<std::string> dataFile = params.m_GenerateTensorData ?
                armnn::EmptyOptional() :
                armnn::MakeOptional<std::string>(params.m_InputTensorDataFilePaths[i]);

            unsigned int numElements = model.GetInputSize(i);
            if (params.m_InputTensorShapes.size() > i && params.m_InputTensorShapes[i])
            {
                // If the user has provided a tensor shape for the current input,
                // override numElements
                numElements = params.m_InputTensorShapes[i]->GetNumElements();
            }

            TContainer tensorData;
            PopulateTensorWithData(tensorData,
                                   numElements,
                                   params.m_InputTypes[i],
                                   qParams,
                                   dataFile);

            inputDataContainers.push_back(tensorData);
        }

        // Pre-size an output container of the requested type for each output.
        const size_t numOutputs = inferenceModelParams.m_OutputBindings.size();
        std::vector<TContainer> outputDataContainers;

        for (unsigned int i = 0; i < numOutputs; ++i)
        {
            if (params.m_OutputTypes[i].compare("float") == 0)
            {
                outputDataContainers.push_back(std::vector<float>(model.GetOutputSize(i)));
            }
            else if (params.m_OutputTypes[i].compare("int") == 0)
            {
                outputDataContainers.push_back(std::vector<int>(model.GetOutputSize(i)));
            }
            else if (params.m_OutputTypes[i].compare("qasymm8") == 0)
            {
                outputDataContainers.push_back(std::vector<uint8_t>(model.GetOutputSize(i)));
            }
            else
            {
                ARMNN_LOG(fatal) << "Unsupported tensor data type \"" << params.m_OutputTypes[i] << "\". ";
                return EXIT_FAILURE;
            }
        }

        for (size_t x = 0; x < iterations; x++)
        {
            // model.Run returns the inference time elapsed in EnqueueWorkload (in milliseconds)
            auto inference_duration = model.Run(inputDataContainers, outputDataContainers);

            if (params.m_GenerateTensorData)
            {
                ARMNN_LOG(warning) << "The input data was generated, note that the output will not be useful";
            }

            // Print output tensors
            const auto& infosOut = model.GetOutputBindingInfos();
            for (size_t i = 0; i < numOutputs; i++)
            {
                const armnn::TensorInfo& infoOut = infosOut[i].second;
                auto outputTensorFile = params.m_OutputTensorFiles.empty() ? "" : params.m_OutputTensorFiles[i];

                TensorPrinter printer(inferenceModelParams.m_OutputBindings[i],
                                      infoOut,
                                      outputTensorFile,
                                      params.m_DequantizeOutput);
                boost::apply_visitor(printer, outputDataContainers[i]);
            }

            ARMNN_LOG(info) << "\nInference time: " << std::setprecision(2)
                            << std::fixed << inference_duration.count() << " ms\n";

            // If thresholdTime == 0.0 (default), then it hasn't been supplied at command line
            if (params.m_ThresholdTime != 0.0)
            {
                ARMNN_LOG(info) << "Threshold time: " << std::setprecision(2)
                                << std::fixed << params.m_ThresholdTime << " ms";
                auto thresholdMinusInference = params.m_ThresholdTime - inference_duration.count();
                ARMNN_LOG(info) << "Threshold time - Inference time: " << std::setprecision(2)
                                << std::fixed << thresholdMinusInference << " ms" << "\n";

                if (thresholdMinusInference < 0)
                {
                    // NOTE(review): logs fatally but does not return a failure
                    // code here — the run continues. Presumably intentional;
                    // confirm against the tool's expected exit behaviour.
                    std::string errorMessage = "Elapsed inference time is greater than provided threshold time.";
                    ARMNN_LOG(fatal) << errorMessage;
                }
            }
        }
    }
    catch (armnn::Exception const& e)
    {
        ARMNN_LOG(fatal) << "Armnn Error: " << e.what();
        return EXIT_FAILURE;
    }

    return EXIT_SUCCESS;
}
530
// This will run a test
//
// Parses and validates all string-encoded command-line settings, packs them
// into an ExecuteNetworkParams, then dispatches to MainImpl with the parser
// matching the model format ("armnn", "caffe", "onnx", "tensorflow", "tflite").
// Returns EXIT_SUCCESS/EXIT_FAILURE (or MainImpl's result).
//
// NOTE(review): the parameter name 'dequantizeOuput' is a typo for
// 'dequantizeOutput'; it is internal-only (callers are unaffected).
int RunTest(const std::string& format,
            const std::string& inputTensorShapesStr,
            const vector<armnn::BackendId>& computeDevices,
            const std::string& dynamicBackendsPath,
            const std::string& path,
            const std::string& inputNames,
            const std::string& inputTensorDataFilePaths,
            const std::string& inputTypes,
            bool quantizeInput,
            const std::string& outputTypes,
            const std::string& outputNames,
            const std::string& outputTensorFiles,
            bool dequantizeOuput,
            bool enableProfiling,
            bool enableFp16TurboMode,
            bool enableBf16TurboMode,
            const double& thresholdTime,
            bool printIntermediate,
            const size_t subgraphId,
            bool enableLayerDetails = false,
            bool parseUnsupported = false,
            const size_t iterations = 1,
            const std::shared_ptr<armnn::IRuntime>& runtime = nullptr)
{
    // Split every comma/colon separated command-line list into vectors.
    std::string modelFormat = armnn::stringUtils::StringTrimCopy(format);
    std::string modelPath = armnn::stringUtils::StringTrimCopy(path);
    std::vector<std::string> inputNamesVector = ParseStringList(inputNames, ",");
    std::vector<std::string> inputTensorShapesVector = ParseStringList(inputTensorShapesStr, ":");
    std::vector<std::string> inputTensorDataFilePathsVector = ParseStringList(
        inputTensorDataFilePaths, ",");
    std::vector<std::string> outputNamesVector = ParseStringList(outputNames, ",");
    std::vector<std::string> inputTypesVector = ParseStringList(inputTypes, ",");
    std::vector<std::string> outputTypesVector = ParseStringList(outputTypes, ",");
    std::vector<std::string> outputTensorFilesVector = ParseStringList(outputTensorFiles, ",");

    // Parse model binary flag from the model-format string we got from the command-line
    bool isModelBinary;
    if (modelFormat.find("bin") != std::string::npos)
    {
        isModelBinary = true;
    }
    else if (modelFormat.find("txt") != std::string::npos || modelFormat.find("text") != std::string::npos)
    {
        isModelBinary = false;
    }
    else
    {
        ARMNN_LOG(fatal) << "Unknown model format: '" << modelFormat << "'. Please include 'binary' or 'text'";
        return EXIT_FAILURE;
    }

    // Per-input/per-output lists must match the number of binding names.
    if ((inputTensorShapesVector.size() != 0) && (inputTensorShapesVector.size() != inputNamesVector.size()))
    {
        ARMNN_LOG(fatal) << "input-name and input-tensor-shape must have the same amount of elements.";
        return EXIT_FAILURE;
    }

    if ((inputTensorDataFilePathsVector.size() != 0) &&
        (inputTensorDataFilePathsVector.size() != inputNamesVector.size()))
    {
        ARMNN_LOG(fatal) << "input-name and input-tensor-data must have the same amount of elements.";
        return EXIT_FAILURE;
    }

    if ((outputTensorFilesVector.size() != 0) &&
        (outputTensorFilesVector.size() != outputNamesVector.size()))
    {
        ARMNN_LOG(fatal) << "output-name and write-outputs-to-file must have the same amount of elements.";
        return EXIT_FAILURE;
    }

    if (inputTypesVector.size() == 0)
    {
        //Defaults the value of all inputs to "float"
        inputTypesVector.assign(inputNamesVector.size(), "float");
    }
    else if ((inputTypesVector.size() != 0) && (inputTypesVector.size() != inputNamesVector.size()))
    {
        ARMNN_LOG(fatal) << "input-name and input-type must have the same amount of elements.";
        return EXIT_FAILURE;
    }

    if (outputTypesVector.size() == 0)
    {
        //Defaults the value of all outputs to "float"
        outputTypesVector.assign(outputNamesVector.size(), "float");
    }
    else if ((outputTypesVector.size() != 0) && (outputTypesVector.size() != outputNamesVector.size()))
    {
        ARMNN_LOG(fatal) << "output-name and output-type must have the same amount of elements.";
        return EXIT_FAILURE;
    }

    // Parse input tensor shape from the string we got from the command-line.
    std::vector<std::unique_ptr<armnn::TensorShape>> inputTensorShapes;

    if (!inputTensorShapesVector.empty())
    {
        inputTensorShapes.reserve(inputTensorShapesVector.size());

        for(const std::string& shape : inputTensorShapesVector)
        {
            std::stringstream ss(shape);
            std::vector<unsigned int> dims = ParseArray(ss);

            try
            {
                // Coverity fix: An exception of type armnn::InvalidArgumentException is thrown and never caught.
                inputTensorShapes.push_back(std::make_unique<armnn::TensorShape>(dims.size(), dims.data()));
            }
            catch (const armnn::InvalidArgumentException& e)
            {
                ARMNN_LOG(fatal) << "Cannot create tensor shape: " << e.what();
                return EXIT_FAILURE;
            }
        }
    }

    // Check that threshold time is not less than zero
    if (thresholdTime < 0)
    {
        ARMNN_LOG(fatal) << "Threshold time supplied as a command line argument is less than zero.";
        return EXIT_FAILURE;
    }

    // Pack everything into the parameter struct consumed by MainImpl.
    // Note: m_ModelPath points at the local 'modelPath' string, which stays
    // alive for the duration of the MainImpl call below.
    ExecuteNetworkParams params;
    params.m_ModelPath = modelPath.c_str();
    params.m_IsModelBinary = isModelBinary;
    params.m_ComputeDevices = computeDevices;
    params.m_DynamicBackendsPath = dynamicBackendsPath;
    params.m_InputNames = inputNamesVector;
    params.m_InputTensorShapes = std::move(inputTensorShapes);
    params.m_InputTensorDataFilePaths = inputTensorDataFilePathsVector;
    params.m_InputTypes = inputTypesVector;
    params.m_QuantizeInput = quantizeInput;
    params.m_OutputTypes = outputTypesVector;
    params.m_OutputNames = outputNamesVector;
    params.m_OutputTensorFiles = outputTensorFilesVector;
    params.m_DequantizeOutput = dequantizeOuput;
    params.m_EnableProfiling = enableProfiling;
    params.m_EnableFp16TurboMode = enableFp16TurboMode;
    params.m_EnableBf16TurboMode = enableBf16TurboMode;
    params.m_ThresholdTime = thresholdTime;
    params.m_PrintIntermediate = printIntermediate;
    params.m_SubgraphId = subgraphId;
    params.m_EnableLayerDetails = enableLayerDetails;
    params.m_GenerateTensorData = inputTensorDataFilePathsVector.empty();
    params.m_ParseUnsupported = parseUnsupported;

    // Warn if ExecuteNetwork will generate dummy input data
    if (params.m_GenerateTensorData)
    {
        ARMNN_LOG(warning) << "No input files provided, input tensors will be filled with 0s.";
    }

    // Forward to implementation based on the parser type
    if (modelFormat.find("armnn") != std::string::npos)
    {
#if defined(ARMNN_SERIALIZER)
        return MainImpl<armnnDeserializer::IDeserializer, float>(params, runtime, iterations);
#else
        ARMNN_LOG(fatal) << "Not built with serialization support.";
        return EXIT_FAILURE;
#endif
    }
    else if (modelFormat.find("caffe") != std::string::npos)
    {
#if defined(ARMNN_CAFFE_PARSER)
        return MainImpl<armnnCaffeParser::ICaffeParser, float>(params, runtime, iterations);
#else
        ARMNN_LOG(fatal) << "Not built with Caffe parser support.";
        return EXIT_FAILURE;
#endif
    }
    else if (modelFormat.find("onnx") != std::string::npos)
    {
#if defined(ARMNN_ONNX_PARSER)
        return MainImpl<armnnOnnxParser::IOnnxParser, float>(params, runtime, iterations);
#else
        ARMNN_LOG(fatal) << "Not built with Onnx parser support.";
        return EXIT_FAILURE;
#endif
    }
    else if (modelFormat.find("tensorflow") != std::string::npos)
    {
#if defined(ARMNN_TF_PARSER)
        return MainImpl<armnnTfParser::ITfParser, float>(params, runtime, iterations);
#else
        ARMNN_LOG(fatal) << "Not built with Tensorflow parser support.";
        return EXIT_FAILURE;
#endif
    }
    else if(modelFormat.find("tflite") != std::string::npos)
    {
#if defined(ARMNN_TF_LITE_PARSER)
        // tflite models are only ever binary flatbuffers.
        if (! isModelBinary)
        {
            ARMNN_LOG(fatal) << "Unknown model format: '" << modelFormat
                             << "'. Only 'binary' format supported for tflite files";
            return EXIT_FAILURE;
        }
        return MainImpl<armnnTfLiteParser::ITfLiteParser, float>(params, runtime, iterations);
#else
        ARMNN_LOG(fatal) << "Unknown model format: '" << modelFormat
                         << "'. Please include 'caffe', 'tensorflow', 'tflite' or 'onnx'";
        return EXIT_FAILURE;
#endif
    }
    else
    {
        ARMNN_LOG(fatal) << "Unknown model format: '" << modelFormat
                         << "'. Please include 'caffe', 'tensorflow', 'tflite' or 'onnx'";
        return EXIT_FAILURE;
    }
}
747
748int RunCsvTest(const armnnUtils::CsvRow &csvRow, const std::shared_ptr<armnn::IRuntime>& runtime,
Narumol Prangnawaratd8cc8112020-03-24 13:54:05 +0000749 const bool enableProfiling, const bool enableFp16TurboMode, const bool enableBf16TurboMode,
750 const double& thresholdTime, const bool printIntermediate, bool enableLayerDetails = false,
751 bool parseUnuspported = false)
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100752{
Jan Eilers8eb25602020-03-09 12:13:48 +0000753 IgnoreUnused(runtime);
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100754 std::string modelFormat;
755 std::string modelPath;
756 std::string inputNames;
757 std::string inputTensorShapes;
758 std::string inputTensorDataFilePaths;
759 std::string outputNames;
760 std::string inputTypes;
761 std::string outputTypes;
Matteo Martincigh00dda4a2019-08-14 11:42:30 +0100762 std::string dynamicBackendsPath;
Sadik Armagan77086282019-09-02 11:46:28 +0100763 std::string outputTensorFiles;
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100764
765 size_t subgraphId = 0;
766
767 const std::string backendsMessage = std::string("The preferred order of devices to run layers on by default. ")
768 + std::string("Possible choices: ")
769 + armnn::BackendRegistryInstance().GetBackendIdsAsString();
770
771 po::options_description desc("Options");
772 try
773 {
774 desc.add_options()
775 ("model-format,f", po::value(&modelFormat),
776 "armnn-binary, caffe-binary, caffe-text, tflite-binary, onnx-binary, onnx-text, tensorflow-binary or "
777 "tensorflow-text.")
778 ("model-path,m", po::value(&modelPath), "Path to model file, e.g. .armnn, .caffemodel, .prototxt, "
779 ".tflite, .onnx")
780 ("compute,c", po::value<std::vector<armnn::BackendId>>()->multitoken(),
781 backendsMessage.c_str())
Matteo Martincigh00dda4a2019-08-14 11:42:30 +0100782 ("dynamic-backends-path,b", po::value(&dynamicBackendsPath),
783 "Path where to load any available dynamic backend from. "
784 "If left empty (the default), dynamic backends will not be used.")
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100785 ("input-name,i", po::value(&inputNames), "Identifier of the input tensors in the network separated by comma.")
786 ("subgraph-number,n", po::value<size_t>(&subgraphId)->default_value(0), "Id of the subgraph to be "
787 "executed. Defaults to 0.")
788 ("input-tensor-shape,s", po::value(&inputTensorShapes),
789 "The shape of the input tensors in the network as a flat array of integers separated by comma. "
790 "Several shapes can be passed separating them by semicolon. "
791 "This parameter is optional, depending on the network.")
Aron Virginas-Tarc82c8732019-10-24 17:07:43 +0100792 ("input-tensor-data,d", po::value(&inputTensorDataFilePaths)->default_value(""),
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100793 "Path to files containing the input data as a flat array separated by whitespace. "
Aron Virginas-Tarc82c8732019-10-24 17:07:43 +0100794 "Several paths can be passed separating them by comma. If not specified, the network will be run with dummy "
795 "data (useful for profiling).")
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100796 ("input-type,y",po::value(&inputTypes), "The type of the input tensors in the network separated by comma. "
797 "If unset, defaults to \"float\" for all defined inputs. "
798 "Accepted values (float, int or qasymm8).")
Narumol Prangnawarat610256f2019-06-26 15:10:46 +0100799 ("quantize-input,q",po::bool_switch()->default_value(false),
800 "If this option is enabled, all float inputs will be quantized to qasymm8. "
801 "If unset, default to not quantized. "
802 "Accepted values (true or false)")
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100803 ("output-type,z",po::value(&outputTypes), "The type of the output tensors in the network separated by comma. "
804 "If unset, defaults to \"float\" for all defined outputs. "
805 "Accepted values (float, int or qasymm8).")
806 ("output-name,o", po::value(&outputNames),
Sadik Armagan77086282019-09-02 11:46:28 +0100807 "Identifier of the output tensors in the network separated by comma.")
Georgios Pinitas50311ba2020-02-18 13:25:23 +0000808 ("dequantize-output,l",po::bool_switch()->default_value(false),
809 "If this option is enabled, all quantized outputs will be dequantized to float. "
810 "If unset, default to not get dequantized. "
811 "Accepted values (true or false)")
Sadik Armagan77086282019-09-02 11:46:28 +0100812 ("write-outputs-to-file,w", po::value(&outputTensorFiles),
813 "Comma-separated list of output file paths keyed with the binding-id of the output slot. "
814 "If left empty (the default), the output tensors will not be written to a file.");
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100815 }
816 catch (const std::exception& e)
817 {
818 // Coverity points out that default_value(...) can throw a bad_lexical_cast,
819 // and that desc.add_options() can throw boost::io::too_few_args.
820 // They really won't in any of these cases.
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +0100821 ARMNN_ASSERT_MSG(false, "Caught unexpected exception");
Derek Lamberti08446972019-11-26 16:38:31 +0000822 ARMNN_LOG(fatal) << "Fatal internal error: " << e.what();
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100823 return EXIT_FAILURE;
824 }
825
826 std::vector<const char*> clOptions;
827 clOptions.reserve(csvRow.values.size());
828 for (const std::string& value : csvRow.values)
829 {
830 clOptions.push_back(value.c_str());
831 }
832
833 po::variables_map vm;
834 try
835 {
836 po::store(po::parse_command_line(static_cast<int>(clOptions.size()), clOptions.data(), desc), vm);
837
838 po::notify(vm);
839
840 CheckOptionDependencies(vm);
841 }
842 catch (const po::error& e)
843 {
844 std::cerr << e.what() << std::endl << std::endl;
845 std::cerr << desc << std::endl;
846 return EXIT_FAILURE;
847 }
848
Narumol Prangnawarat610256f2019-06-26 15:10:46 +0100849 // Get the value of the switch arguments.
850 bool quantizeInput = vm["quantize-input"].as<bool>();
Georgios Pinitas50311ba2020-02-18 13:25:23 +0000851 bool dequantizeOutput = vm["dequantize-output"].as<bool>();
Narumol Prangnawarat610256f2019-06-26 15:10:46 +0100852
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100853 // Get the preferred order of compute devices.
854 std::vector<armnn::BackendId> computeDevices = vm["compute"].as<std::vector<armnn::BackendId>>();
855
856 // Remove duplicates from the list of compute devices.
857 RemoveDuplicateDevices(computeDevices);
858
859 // Check that the specified compute devices are valid.
860 std::string invalidBackends;
861 if (!CheckRequestedBackendsAreValid(computeDevices, armnn::Optional<std::string&>(invalidBackends)))
862 {
Derek Lamberti08446972019-11-26 16:38:31 +0000863 ARMNN_LOG(fatal) << "The list of preferred devices contains invalid backend IDs: "
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100864 << invalidBackends;
865 return EXIT_FAILURE;
866 }
867
Matteo Martincigh00dda4a2019-08-14 11:42:30 +0100868 return RunTest(modelFormat, inputTensorShapes, computeDevices, dynamicBackendsPath, modelPath, inputNames,
Sadik Armagan77086282019-09-02 11:46:28 +0100869 inputTensorDataFilePaths, inputTypes, quantizeInput, outputTypes, outputNames, outputTensorFiles,
Narumol Prangnawaratd8cc8112020-03-24 13:54:05 +0000870 dequantizeOutput, enableProfiling, enableFp16TurboMode, enableBf16TurboMode,
871 thresholdTime, printIntermediate, subgraphId, enableLayerDetails, parseUnuspported);
Matteo Martincigh00dda4a2019-08-14 11:42:30 +0100872}
alered0172b41562020-05-07 14:58:29 +0100873
#if defined(ARMCOMPUTECL_ENABLED)
// Executes one run of the network with OpenCL kernel tuning enabled on the GpuAcc
// backend, so that tuning data is generated/refreshed at 'tuningPath'.
//
// tuningPath  - file the CL tuner reads/writes its tuned parameters to.
// tuningLevel - aggressiveness of the tuner (backend "TuningLevel" option).
// All remaining parameters are forwarded unchanged to RunTest.
// Returns the status code produced by RunTest.
int RunCLTuning(const std::string& tuningPath,
                const int tuningLevel,
                const std::string& modelFormat,
                const std::string& inputTensorShapes,
                const vector<armnn::BackendId>& computeDevices,
                const std::string& dynamicBackendsPath,
                const std::string& modelPath,
                const std::string& inputNames,
                const std::string& inputTensorDataFilePaths,
                const std::string& inputTypes,
                bool quantizeInput,
                const std::string& outputTypes,
                const std::string& outputNames,
                const std::string& outputTensorFiles,
                bool dequantizeOutput,
                bool enableProfiling,
                bool enableFp16TurboMode,
                bool enableBf16TurboMode,
                const double& thresholdTime,
                bool printIntermediate,
                const size_t subgraphId,
                bool enableLayerDetails = false,
                bool parseUnsupported = false)
{
    // The tuning settings must be attached to the GpuAcc backend before the runtime exists.
    armnn::BackendOptions tuningOptions
    {
        "GpuAcc",
        {
            {"TuningLevel", tuningLevel},
            {"TuningFile", tuningPath.c_str()},
            {"KernelProfilingEnabled", enableProfiling}
        }
    };

    armnn::IRuntime::CreationOptions creationOptions;
    creationOptions.m_BackendOptions.push_back(tuningOptions);

    std::shared_ptr<armnn::IRuntime> tuningRuntime(armnn::IRuntime::Create(creationOptions));

    const auto tuningStart = armnn::GetTimeNow();
    ARMNN_LOG(info) << "Tuning run...\n";

    // A single iteration is sufficient for the tuner to collect its data.
    const int status = RunTest(modelFormat, inputTensorShapes, computeDevices, dynamicBackendsPath, modelPath,
                               inputNames, inputTensorDataFilePaths, inputTypes, quantizeInput, outputTypes,
                               outputNames, outputTensorFiles, dequantizeOutput, enableProfiling,
                               enableFp16TurboMode, enableBf16TurboMode, thresholdTime, printIntermediate,
                               subgraphId, enableLayerDetails, parseUnsupported, 1, tuningRuntime);

    ARMNN_LOG(info) << "Tuning time: " << std::setprecision(2)
                    << std::fixed << armnn::GetTimeDuration(tuningStart).count() << " ms\n";

    return status;
}
#endif