blob: d7e927591694987f3ed147bc7a3bde0d2e2fb5bb [file] [log] [blame]
Francis Murtaghbee4bc92019-06-18 12:30:37 +01001//
2// Copyright © 2017 Arm Ltd. All rights reserved.
3// SPDX-License-Identifier: MIT
4//
5#include <armnn/ArmNN.hpp>
6#include <armnn/TypesUtils.hpp>
7
8#if defined(ARMNN_SERIALIZER)
9#include "armnnDeserializer/IDeserializer.hpp"
10#endif
11#if defined(ARMNN_CAFFE_PARSER)
12#include "armnnCaffeParser/ICaffeParser.hpp"
13#endif
14#if defined(ARMNN_TF_PARSER)
15#include "armnnTfParser/ITfParser.hpp"
16#endif
17#if defined(ARMNN_TF_LITE_PARSER)
18#include "armnnTfLiteParser/ITfLiteParser.hpp"
19#endif
20#if defined(ARMNN_ONNX_PARSER)
21#include "armnnOnnxParser/IOnnxParser.hpp"
22#endif
23#include "CsvReader.hpp"
24#include "../InferenceTest.hpp"
25
Francis Murtaghbee4bc92019-06-18 12:30:37 +010026#include <Profiling.hpp>
Aron Virginas-Tarc82c8732019-10-24 17:07:43 +010027#include <ResolveType.hpp>
Francis Murtaghbee4bc92019-06-18 12:30:37 +010028
29#include <boost/algorithm/string/trim.hpp>
30#include <boost/algorithm/string/split.hpp>
31#include <boost/algorithm/string/classification.hpp>
32#include <boost/program_options.hpp>
33#include <boost/variant.hpp>
34
35#include <iostream>
36#include <fstream>
37#include <functional>
38#include <future>
39#include <algorithm>
40#include <iterator>
41
42namespace
43{
44
45// Configure boost::program_options for command-line parsing and validation.
46namespace po = boost::program_options;
47
48template<typename T, typename TParseElementFunc>
49std::vector<T> ParseArrayImpl(std::istream& stream, TParseElementFunc parseElementFunc, const char * chars = "\t ,:")
50{
51 std::vector<T> result;
52 // Processes line-by-line.
53 std::string line;
54 while (std::getline(stream, line))
55 {
56 std::vector<std::string> tokens;
57 try
58 {
59 // Coverity fix: boost::split() may throw an exception of type boost::bad_function_call.
60 boost::split(tokens, line, boost::algorithm::is_any_of(chars), boost::token_compress_on);
61 }
62 catch (const std::exception& e)
63 {
Derek Lamberti08446972019-11-26 16:38:31 +000064 ARMNN_LOG(error) << "An error occurred when splitting tokens: " << e.what();
Francis Murtaghbee4bc92019-06-18 12:30:37 +010065 continue;
66 }
67 for (const std::string& token : tokens)
68 {
69 if (!token.empty()) // See https://stackoverflow.com/questions/10437406/
70 {
71 try
72 {
73 result.push_back(parseElementFunc(token));
74 }
75 catch (const std::exception&)
76 {
Derek Lamberti08446972019-11-26 16:38:31 +000077 ARMNN_LOG(error) << "'" << token << "' is not a valid number. It has been ignored.";
Francis Murtaghbee4bc92019-06-18 12:30:37 +010078 }
79 }
80 }
81 }
82
83 return result;
84}
85
86bool CheckOption(const po::variables_map& vm,
87 const char* option)
88{
89 // Check that the given option is valid.
90 if (option == nullptr)
91 {
92 return false;
93 }
94
95 // Check whether 'option' is provided.
96 return vm.find(option) != vm.end();
97}
98
99void CheckOptionDependency(const po::variables_map& vm,
100 const char* option,
101 const char* required)
102{
103 // Check that the given options are valid.
104 if (option == nullptr || required == nullptr)
105 {
106 throw po::error("Invalid option to check dependency for");
107 }
108
109 // Check that if 'option' is provided, 'required' is also provided.
110 if (CheckOption(vm, option) && !vm[option].defaulted())
111 {
112 if (CheckOption(vm, required) == 0 || vm[required].defaulted())
113 {
114 throw po::error(std::string("Option '") + option + "' requires option '" + required + "'.");
115 }
116 }
117}
118
119void CheckOptionDependencies(const po::variables_map& vm)
120{
121 CheckOptionDependency(vm, "model-path", "model-format");
122 CheckOptionDependency(vm, "model-path", "input-name");
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100123 CheckOptionDependency(vm, "model-path", "output-name");
124 CheckOptionDependency(vm, "input-tensor-shape", "model-path");
125}
126
127template<armnn::DataType NonQuantizedType>
128auto ParseDataArray(std::istream & stream);
129
130template<armnn::DataType QuantizedType>
131auto ParseDataArray(std::istream& stream,
132 const float& quantizationScale,
133 const int32_t& quantizationOffset);
134
135template<>
136auto ParseDataArray<armnn::DataType::Float32>(std::istream & stream)
137{
138 return ParseArrayImpl<float>(stream, [](const std::string& s) { return std::stof(s); });
139}
140
141template<>
142auto ParseDataArray<armnn::DataType::Signed32>(std::istream & stream)
143{
144 return ParseArrayImpl<int>(stream, [](const std::string & s) { return std::stoi(s); });
145}
146
147template<>
Derek Lambertif90c56d2020-01-10 17:14:08 +0000148auto ParseDataArray<armnn::DataType::QAsymmU8>(std::istream& stream)
Narumol Prangnawarat610256f2019-06-26 15:10:46 +0100149{
150 return ParseArrayImpl<uint8_t>(stream,
151 [](const std::string& s) { return boost::numeric_cast<uint8_t>(std::stoi(s)); });
152}
153
154template<>
Derek Lambertif90c56d2020-01-10 17:14:08 +0000155auto ParseDataArray<armnn::DataType::QAsymmU8>(std::istream& stream,
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100156 const float& quantizationScale,
157 const int32_t& quantizationOffset)
158{
159 return ParseArrayImpl<uint8_t>(stream,
160 [&quantizationScale, &quantizationOffset](const std::string & s)
161 {
162 return boost::numeric_cast<uint8_t>(
Rob Hughes93667b12019-09-23 16:24:05 +0100163 armnn::Quantize<uint8_t>(std::stof(s),
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100164 quantizationScale,
165 quantizationOffset));
166 });
167}
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100168std::vector<unsigned int> ParseArray(std::istream& stream)
169{
170 return ParseArrayImpl<unsigned int>(stream,
171 [](const std::string& s) { return boost::numeric_cast<unsigned int>(std::stoi(s)); });
172}
173
174std::vector<std::string> ParseStringList(const std::string & inputString, const char * delimiter)
175{
176 std::stringstream stream(inputString);
177 return ParseArrayImpl<std::string>(stream, [](const std::string& s) { return boost::trim_copy(s); }, delimiter);
178}
179
180void RemoveDuplicateDevices(std::vector<armnn::BackendId>& computeDevices)
181{
182 // Mark the duplicate devices as 'Undefined'.
183 for (auto i = computeDevices.begin(); i != computeDevices.end(); ++i)
184 {
185 for (auto j = std::next(i); j != computeDevices.end(); ++j)
186 {
187 if (*j == *i)
188 {
189 *j = armnn::Compute::Undefined;
190 }
191 }
192 }
193
194 // Remove 'Undefined' devices.
195 computeDevices.erase(std::remove(computeDevices.begin(), computeDevices.end(), armnn::Compute::Undefined),
196 computeDevices.end());
197}
198
// boost::static_visitor that prints one output tensor to stdout and, when an
// output file path was supplied, mirrors the same values into that file.
// Quantized (uint8) outputs are either dequantized with the tensor's
// scale/offset or printed as raw integers, depending on 'dequantizeOutput'.
struct TensorPrinter : public boost::static_visitor<>
{
    // 'binding' is the output binding name used as a prefix for every dump;
    // 'info' supplies the quantization scale/offset for uint8 outputs.
    TensorPrinter(const std::string& binding,
                  const armnn::TensorInfo& info,
                  const std::string& outputTensorFile,
                  bool dequantizeOutput)
        : m_OutputBinding(binding)
        , m_Scale(info.GetQuantizationScale())
        , m_Offset(info.GetQuantizationOffset())
        , m_OutputTensorFile(outputTensorFile)
        , m_DequantizeOutput(dequantizeOutput)
    {}

    // Float output: printed and written as-is.
    void operator()(const std::vector<float>& values)
    {
        ForEachValue(values, [](float value)
        {
            printf("%f ", value);
        });
        WriteToFile(values);
    }

    // Quantized (uint8) output: dequantize each value with scale/offset, or
    // fall back to printing the raw values via the int overload.
    void operator()(const std::vector<uint8_t>& values)
    {
        if(m_DequantizeOutput)
        {
            auto& scale = m_Scale;
            auto& offset = m_Offset;
            std::vector<float> dequantizedValues;
            ForEachValue(values, [&scale, &offset, &dequantizedValues](uint8_t value)
            {
                auto dequantizedValue = armnn::Dequantize(value, scale, offset);
                printf("%f ", dequantizedValue);
                dequantizedValues.push_back(dequantizedValue);
            });
            WriteToFile(dequantizedValues);
        }
        else
        {
            const std::vector<int> intValues(values.begin(), values.end());
            operator()(intValues);
        }
    }

    // Signed integer output: printed and written as-is.
    void operator()(const std::vector<int>& values)
    {
        ForEachValue(values, [](int value)
        {
            printf("%d ", value);
        });
        WriteToFile(values);
    }

private:
    // Prints "<binding>: " to stdout, applies 'delegate' to every value,
    // then terminates the line.
    template<typename Container, typename Delegate>
    void ForEachValue(const Container& c, Delegate delegate)
    {
        std::cout << m_OutputBinding << ": ";
        for (const auto& value : c)
        {
            delegate(value);
        }
        printf("\n");
    }

    // Writes the values space-separated to m_OutputTensorFile (truncating any
    // existing content); no-op when no file path was supplied. A file that
    // cannot be opened is logged, not treated as an error.
    template<typename T>
    void WriteToFile(const std::vector<T>& values)
    {
        if (!m_OutputTensorFile.empty())
        {
            std::ofstream outputTensorFile;
            outputTensorFile.open(m_OutputTensorFile, std::ofstream::out | std::ofstream::trunc);
            if (outputTensorFile.is_open())
            {
                outputTensorFile << m_OutputBinding << ": ";
                std::copy(values.begin(), values.end(), std::ostream_iterator<T>(outputTensorFile, " "));
            }
            else
            {
                ARMNN_LOG(info) << "Output Tensor File: " << m_OutputTensorFile << " could not be opened!";
            }
            outputTensorFile.close();
        }
    }

    std::string m_OutputBinding;     // Output binding name, used as dump prefix.
    float m_Scale=0.0f;              // Quantization scale from the output TensorInfo.
    int m_Offset=0;                  // Quantization offset from the output TensorInfo.
    std::string m_OutputTensorFile;  // Optional dump file path ("" = stdout only).
    bool m_DequantizeOutput = false; // Whether uint8 outputs are dequantized before printing.
};
290
291
Aron Virginas-Tarc82c8732019-10-24 17:07:43 +0100292
293template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
294std::vector<T> GenerateDummyTensorData(unsigned int numElements)
295{
296 return std::vector<T>(numElements, static_cast<T>(0));
297}
298
299using TContainer = boost::variant<std::vector<float>, std::vector<int>, std::vector<unsigned char>>;
300using QuantizationParams = std::pair<float, int32_t>;
301
302void PopulateTensorWithData(TContainer& tensorData,
303 unsigned int numElements,
304 const std::string& dataTypeStr,
305 const armnn::Optional<QuantizationParams>& qParams,
306 const armnn::Optional<std::string>& dataFile)
307{
308 const bool readFromFile = dataFile.has_value() && !dataFile.value().empty();
309 const bool quantizeData = qParams.has_value();
310
311 std::ifstream inputTensorFile;
312 if (readFromFile)
313 {
314 inputTensorFile = std::ifstream(dataFile.value());
315 }
316
317 if (dataTypeStr.compare("float") == 0)
318 {
319 if (quantizeData)
320 {
321 const float qScale = qParams.value().first;
322 const int qOffset = qParams.value().second;
323
324 tensorData = readFromFile ?
Derek Lambertif90c56d2020-01-10 17:14:08 +0000325 ParseDataArray<armnn::DataType::QAsymmU8>(inputTensorFile, qScale, qOffset) :
326 GenerateDummyTensorData<armnn::DataType::QAsymmU8>(numElements);
Aron Virginas-Tarc82c8732019-10-24 17:07:43 +0100327 }
328 else
329 {
330 tensorData = readFromFile ?
331 ParseDataArray<armnn::DataType::Float32>(inputTensorFile) :
332 GenerateDummyTensorData<armnn::DataType::Float32>(numElements);
333 }
334 }
335 else if (dataTypeStr.compare("int") == 0)
336 {
337 tensorData = readFromFile ?
338 ParseDataArray<armnn::DataType::Signed32>(inputTensorFile) :
339 GenerateDummyTensorData<armnn::DataType::Signed32>(numElements);
340 }
341 else if (dataTypeStr.compare("qasymm8") == 0)
342 {
343 tensorData = readFromFile ?
Derek Lambertif90c56d2020-01-10 17:14:08 +0000344 ParseDataArray<armnn::DataType::QAsymmU8>(inputTensorFile) :
345 GenerateDummyTensorData<armnn::DataType::QAsymmU8>(numElements);
Aron Virginas-Tarc82c8732019-10-24 17:07:43 +0100346 }
347 else
348 {
349 std::string errorMessage = "Unsupported tensor data type " + dataTypeStr;
Derek Lamberti08446972019-11-26 16:38:31 +0000350 ARMNN_LOG(fatal) << errorMessage;
Aron Virginas-Tarc82c8732019-10-24 17:07:43 +0100351
352 inputTensorFile.close();
353 throw armnn::Exception(errorMessage);
354 }
355
356 inputTensorFile.close();
357}
358
359} // anonymous namespace
360
// NOTE(review): file-scope flag outside the anonymous namespace; it is not
// referenced anywhere in the visible portion of this file — presumably
// consumed by another translation unit. Confirm before removing.
bool generateTensorData = true;
362
// Aggregates every command-line-controlled setting consumed by MainImpl.
// Populated by RunTest from the parsed/validated command-line arguments.
struct ExecuteNetworkParams
{
    using TensorShapePtr = std::unique_ptr<armnn::TensorShape>;

    const char* m_ModelPath;                          // Path to the model file.
    bool m_IsModelBinary;                             // true = binary model format, false = text.
    std::vector<armnn::BackendId> m_ComputeDevices;   // Backends to run on, in preference order.
    std::string m_DynamicBackendsPath;                // Search path for dynamic backends.
    std::vector<string> m_InputNames;                 // Input binding names.
    std::vector<TensorShapePtr> m_InputTensorShapes;  // Optional per-input shape overrides.
    std::vector<string> m_InputTensorDataFilePaths;   // Per-input data files (empty = generate zeros).
    std::vector<string> m_InputTypes;                 // Per-input type strings ("float"/"int"/"qasymm8").
    bool m_QuantizeInput;                             // Quantize float input data to uint8.
    std::vector<string> m_OutputTypes;                // Per-output type strings.
    std::vector<string> m_OutputNames;                // Output binding names.
    std::vector<string> m_OutputTensorFiles;          // Optional per-output dump files.
    bool m_DequantizeOutput;                          // Dequantize uint8 outputs before printing.
    bool m_EnableProfiling;                           // Enable ArmNN profiling.
    bool m_EnableFp16TurboMode;                       // Allow the FP32->FP16 optimization.
    double m_ThresholdTime;                           // Inference time budget in ms (0.0 = unset).
    bool m_PrintIntermediate;                         // Print intermediate layer outputs.
    size_t m_SubgraphId;                              // Subgraph to execute (parser-dependent).
    bool m_EnableLayerDetails = false;                // Visualize the post-optimization model.
    bool m_GenerateTensorData;                        // true when no input data files were provided.
    bool m_ParseUnsupported = false;                  // Insert stand-in layers for unsupported ops.
};
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100389
// Loads the model described by 'params' with parser TParser, populates the
// input tensors (from file or with generated zeros), runs one inference and
// prints/dumps the outputs. Returns EXIT_SUCCESS or EXIT_FAILURE.
// 'runtime' may be shared across calls; when null the model creates its own.
template<typename TParser, typename TDataType>
int MainImpl(const ExecuteNetworkParams& params,
             const std::shared_ptr<armnn::IRuntime>& runtime = nullptr)
{
    using TContainer = boost::variant<std::vector<float>, std::vector<int>, std::vector<unsigned char>>;

    std::vector<TContainer> inputDataContainers;

    try
    {
        // Creates an InferenceModel, which will parse the model and load it into an IRuntime.
        typename InferenceModel<TParser, TDataType>::Params inferenceModelParams;
        inferenceModelParams.m_ModelPath = params.m_ModelPath;
        inferenceModelParams.m_IsModelBinary = params.m_IsModelBinary;
        inferenceModelParams.m_ComputeDevices = params.m_ComputeDevices;
        inferenceModelParams.m_DynamicBackendsPath = params.m_DynamicBackendsPath;
        inferenceModelParams.m_PrintIntermediateLayers = params.m_PrintIntermediate;
        inferenceModelParams.m_VisualizePostOptimizationModel = params.m_EnableLayerDetails;
        inferenceModelParams.m_ParseUnsupported = params.m_ParseUnsupported;

        for(const std::string& inputName: params.m_InputNames)
        {
            inferenceModelParams.m_InputBindings.push_back(inputName);
        }

        for(unsigned int i = 0; i < params.m_InputTensorShapes.size(); ++i)
        {
            inferenceModelParams.m_InputShapes.push_back(*params.m_InputTensorShapes[i]);
        }

        for(const std::string& outputName: params.m_OutputNames)
        {
            inferenceModelParams.m_OutputBindings.push_back(outputName);
        }

        inferenceModelParams.m_SubgraphId = params.m_SubgraphId;
        inferenceModelParams.m_EnableFp16TurboMode = params.m_EnableFp16TurboMode;

        InferenceModel<TParser, TDataType> model(inferenceModelParams,
                                                 params.m_EnableProfiling,
                                                 params.m_DynamicBackendsPath,
                                                 runtime);

        // Build one input container per input binding, either parsed from the
        // user's data file or filled with generated zeros.
        const size_t numInputs = inferenceModelParams.m_InputBindings.size();
        for(unsigned int i = 0; i < numInputs; ++i)
        {
            armnn::Optional<QuantizationParams> qParams = params.m_QuantizeInput ?
                armnn::MakeOptional<QuantizationParams>(model.GetInputQuantizationParams()) :
                armnn::EmptyOptional();

            armnn::Optional<std::string> dataFile = params.m_GenerateTensorData ?
                armnn::EmptyOptional() :
                armnn::MakeOptional<std::string>(params.m_InputTensorDataFilePaths[i]);

            unsigned int numElements = model.GetInputSize(i);
            if (params.m_InputTensorShapes.size() > i && params.m_InputTensorShapes[i])
            {
                // If the user has provided a tensor shape for the current input,
                // override numElements
                numElements = params.m_InputTensorShapes[i]->GetNumElements();
            }

            TContainer tensorData;
            PopulateTensorWithData(tensorData,
                                   numElements,
                                   params.m_InputTypes[i],
                                   qParams,
                                   dataFile);

            inputDataContainers.push_back(tensorData);
        }

        // Pre-size one output container per output binding, typed per m_OutputTypes.
        const size_t numOutputs = inferenceModelParams.m_OutputBindings.size();
        std::vector<TContainer> outputDataContainers;

        for (unsigned int i = 0; i < numOutputs; ++i)
        {
            if (params.m_OutputTypes[i].compare("float") == 0)
            {
                outputDataContainers.push_back(std::vector<float>(model.GetOutputSize(i)));
            }
            else if (params.m_OutputTypes[i].compare("int") == 0)
            {
                outputDataContainers.push_back(std::vector<int>(model.GetOutputSize(i)));
            }
            else if (params.m_OutputTypes[i].compare("qasymm8") == 0)
            {
                outputDataContainers.push_back(std::vector<uint8_t>(model.GetOutputSize(i)));
            }
            else
            {
                ARMNN_LOG(fatal) << "Unsupported tensor data type \"" << params.m_OutputTypes[i] << "\". ";
                return EXIT_FAILURE;
            }
        }

        // model.Run returns the inference time elapsed in EnqueueWorkload (in milliseconds)
        auto inference_duration = model.Run(inputDataContainers, outputDataContainers);

        if (params.m_GenerateTensorData)
        {
            ARMNN_LOG(warning) << "The input data was generated, note that the output will not be useful";
        }

        // Print output tensors
        const auto& infosOut = model.GetOutputBindingInfos();
        for (size_t i = 0; i < numOutputs; i++)
        {
            const armnn::TensorInfo& infoOut = infosOut[i].second;
            auto outputTensorFile = params.m_OutputTensorFiles.empty() ? "" : params.m_OutputTensorFiles[i];

            TensorPrinter printer(inferenceModelParams.m_OutputBindings[i],
                                  infoOut,
                                  outputTensorFile,
                                  params.m_DequantizeOutput);
            boost::apply_visitor(printer, outputDataContainers[i]);
        }

        ARMNN_LOG(info) << "\nInference time: " << std::setprecision(2)
                        << std::fixed << inference_duration.count() << " ms";

        // If thresholdTime == 0.0 (default), then it hasn't been supplied at command line
        if (params.m_ThresholdTime != 0.0)
        {
            ARMNN_LOG(info) << "Threshold time: " << std::setprecision(2)
                            << std::fixed << params.m_ThresholdTime << " ms";
            auto thresholdMinusInference = params.m_ThresholdTime - inference_duration.count();
            ARMNN_LOG(info) << "Threshold time - Inference time: " << std::setprecision(2)
                            << std::fixed << thresholdMinusInference << " ms" << "\n";

            // Exceeding the threshold is logged as fatal but does not change
            // the return code — only the log severity reports it.
            if (thresholdMinusInference < 0)
            {
                std::string errorMessage = "Elapsed inference time is greater than provided threshold time.";
                ARMNN_LOG(fatal) << errorMessage;
            }
        }
    }
    catch (armnn::Exception const& e)
    {
        ARMNN_LOG(fatal) << "Armnn Error: " << e.what();
        return EXIT_FAILURE;
    }

    return EXIT_SUCCESS;
}
535
// This will run a test.
// Validates the raw command-line strings (comma/colon separated lists),
// builds an ExecuteNetworkParams and dispatches to MainImpl with the parser
// selected by 'format'. Returns EXIT_SUCCESS or EXIT_FAILURE.
// Note: 'dequantizeOuput' (sic) feeds ExecuteNetworkParams::m_DequantizeOutput.
int RunTest(const std::string& format,
            const std::string& inputTensorShapesStr,
            const vector<armnn::BackendId>& computeDevices,
            const std::string& dynamicBackendsPath,
            const std::string& path,
            const std::string& inputNames,
            const std::string& inputTensorDataFilePaths,
            const std::string& inputTypes,
            bool quantizeInput,
            const std::string& outputTypes,
            const std::string& outputNames,
            const std::string& outputTensorFiles,
            bool dequantizeOuput,
            bool enableProfiling,
            bool enableFp16TurboMode,
            const double& thresholdTime,
            bool printIntermediate,
            const size_t subgraphId,
            bool enableLayerDetails = false,
            bool parseUnsupported = false,
            const std::shared_ptr<armnn::IRuntime>& runtime = nullptr)
{
    std::string modelFormat = boost::trim_copy(format);
    std::string modelPath = boost::trim_copy(path);
    // Multi-value options arrive as delimited strings; shapes use ':' because
    // each shape itself contains ','-separated dimensions.
    std::vector<std::string> inputNamesVector = ParseStringList(inputNames, ",");
    std::vector<std::string> inputTensorShapesVector = ParseStringList(inputTensorShapesStr, ":");
    std::vector<std::string> inputTensorDataFilePathsVector = ParseStringList(
        inputTensorDataFilePaths, ",");
    std::vector<std::string> outputNamesVector = ParseStringList(outputNames, ",");
    std::vector<std::string> inputTypesVector = ParseStringList(inputTypes, ",");
    std::vector<std::string> outputTypesVector = ParseStringList(outputTypes, ",");
    std::vector<std::string> outputTensorFilesVector = ParseStringList(outputTensorFiles, ",");

    // Parse model binary flag from the model-format string we got from the command-line
    bool isModelBinary;
    if (modelFormat.find("bin") != std::string::npos)
    {
        isModelBinary = true;
    }
    else if (modelFormat.find("txt") != std::string::npos || modelFormat.find("text") != std::string::npos)
    {
        isModelBinary = false;
    }
    else
    {
        ARMNN_LOG(fatal) << "Unknown model format: '" << modelFormat << "'. Please include 'binary' or 'text'";
        return EXIT_FAILURE;
    }

    // Per-input/per-output option lists must line up with the binding names.
    if ((inputTensorShapesVector.size() != 0) && (inputTensorShapesVector.size() != inputNamesVector.size()))
    {
        ARMNN_LOG(fatal) << "input-name and input-tensor-shape must have the same amount of elements.";
        return EXIT_FAILURE;
    }

    if ((inputTensorDataFilePathsVector.size() != 0) &&
        (inputTensorDataFilePathsVector.size() != inputNamesVector.size()))
    {
        ARMNN_LOG(fatal) << "input-name and input-tensor-data must have the same amount of elements.";
        return EXIT_FAILURE;
    }

    if ((outputTensorFilesVector.size() != 0) &&
        (outputTensorFilesVector.size() != outputNamesVector.size()))
    {
        ARMNN_LOG(fatal) << "output-name and write-outputs-to-file must have the same amount of elements.";
        return EXIT_FAILURE;
    }

    if (inputTypesVector.size() == 0)
    {
        //Defaults the value of all inputs to "float"
        inputTypesVector.assign(inputNamesVector.size(), "float");
    }
    else if ((inputTypesVector.size() != 0) && (inputTypesVector.size() != inputNamesVector.size()))
    {
        ARMNN_LOG(fatal) << "input-name and input-type must have the same amount of elements.";
        return EXIT_FAILURE;
    }

    if (outputTypesVector.size() == 0)
    {
        //Defaults the value of all outputs to "float"
        outputTypesVector.assign(outputNamesVector.size(), "float");
    }
    else if ((outputTypesVector.size() != 0) && (outputTypesVector.size() != outputNamesVector.size()))
    {
        ARMNN_LOG(fatal) << "output-name and output-type must have the same amount of elements.";
        return EXIT_FAILURE;
    }

    // Parse input tensor shape from the string we got from the command-line.
    std::vector<std::unique_ptr<armnn::TensorShape>> inputTensorShapes;

    if (!inputTensorShapesVector.empty())
    {
        inputTensorShapes.reserve(inputTensorShapesVector.size());

        for(const std::string& shape : inputTensorShapesVector)
        {
            std::stringstream ss(shape);
            std::vector<unsigned int> dims = ParseArray(ss);

            try
            {
                // Coverity fix: An exception of type armnn::InvalidArgumentException is thrown and never caught.
                inputTensorShapes.push_back(std::make_unique<armnn::TensorShape>(dims.size(), dims.data()));
            }
            catch (const armnn::InvalidArgumentException& e)
            {
                ARMNN_LOG(fatal) << "Cannot create tensor shape: " << e.what();
                return EXIT_FAILURE;
            }
        }
    }

    // Check that threshold time is not less than zero
    if (thresholdTime < 0)
    {
        ARMNN_LOG(fatal) << "Threshold time supplied as a command line argument is less than zero.";
        return EXIT_FAILURE;
    }

    // Assemble the validated settings for MainImpl.
    // Note: m_ModelPath points into the local 'modelPath', which outlives the
    // MainImpl call below.
    ExecuteNetworkParams params;
    params.m_ModelPath = modelPath.c_str();
    params.m_IsModelBinary = isModelBinary;
    params.m_ComputeDevices = computeDevices;
    params.m_DynamicBackendsPath = dynamicBackendsPath;
    params.m_InputNames = inputNamesVector;
    params.m_InputTensorShapes = std::move(inputTensorShapes);
    params.m_InputTensorDataFilePaths = inputTensorDataFilePathsVector;
    params.m_InputTypes = inputTypesVector;
    params.m_QuantizeInput = quantizeInput;
    params.m_OutputTypes = outputTypesVector;
    params.m_OutputNames = outputNamesVector;
    params.m_OutputTensorFiles = outputTensorFilesVector;
    params.m_DequantizeOutput = dequantizeOuput;
    params.m_EnableProfiling = enableProfiling;
    params.m_EnableFp16TurboMode = enableFp16TurboMode;
    params.m_ThresholdTime = thresholdTime;
    params.m_PrintIntermediate = printIntermediate;
    params.m_SubgraphId = subgraphId;
    params.m_EnableLayerDetails = enableLayerDetails;
    params.m_GenerateTensorData = inputTensorDataFilePathsVector.empty();
    params.m_ParseUnsupported = parseUnsupported;

    // Warn if ExecuteNetwork will generate dummy input data
    if (params.m_GenerateTensorData)
    {
        ARMNN_LOG(warning) << "No input files provided, input tensors will be filled with 0s.";
    }

    // Forward to implementation based on the parser type
    if (modelFormat.find("armnn") != std::string::npos)
    {
#if defined(ARMNN_SERIALIZER)
        return MainImpl<armnnDeserializer::IDeserializer, float>(params, runtime);
#else
        ARMNN_LOG(fatal) << "Not built with serialization support.";
        return EXIT_FAILURE;
#endif
    }
    else if (modelFormat.find("caffe") != std::string::npos)
    {
#if defined(ARMNN_CAFFE_PARSER)
        return MainImpl<armnnCaffeParser::ICaffeParser, float>(params, runtime);
#else
        ARMNN_LOG(fatal) << "Not built with Caffe parser support.";
        return EXIT_FAILURE;
#endif
    }
    else if (modelFormat.find("onnx") != std::string::npos)
    {
#if defined(ARMNN_ONNX_PARSER)
        return MainImpl<armnnOnnxParser::IOnnxParser, float>(params, runtime);
#else
        ARMNN_LOG(fatal) << "Not built with Onnx parser support.";
        return EXIT_FAILURE;
#endif
    }
    else if (modelFormat.find("tensorflow") != std::string::npos)
    {
#if defined(ARMNN_TF_PARSER)
        return MainImpl<armnnTfParser::ITfParser, float>(params, runtime);
#else
        ARMNN_LOG(fatal) << "Not built with Tensorflow parser support.";
        return EXIT_FAILURE;
#endif
    }
    else if(modelFormat.find("tflite") != std::string::npos)
    {
#if defined(ARMNN_TF_LITE_PARSER)
        if (! isModelBinary)
        {
            ARMNN_LOG(fatal) << "Unknown model format: '" << modelFormat << "'. Only 'binary' format supported \
  for tflite files";
            return EXIT_FAILURE;
        }
        return MainImpl<armnnTfLiteParser::ITfLiteParser, float>(params, runtime);
#else
        ARMNN_LOG(fatal) << "Unknown model format: '" << modelFormat <<
                            "'. Please include 'caffe', 'tensorflow', 'tflite' or 'onnx'";
        return EXIT_FAILURE;
#endif
    }
    else
    {
        ARMNN_LOG(fatal) << "Unknown model format: '" << modelFormat <<
                            "'. Please include 'caffe', 'tensorflow', 'tflite' or 'onnx'";
        return EXIT_FAILURE;
    }
}
749
750int RunCsvTest(const armnnUtils::CsvRow &csvRow, const std::shared_ptr<armnn::IRuntime>& runtime,
Matthew Jackson54658b92019-08-27 15:35:59 +0100751 const bool enableProfiling, const bool enableFp16TurboMode, const double& thresholdTime,
Derek Lamberti132563c2019-12-02 16:06:40 +0000752 const bool printIntermediate, bool enableLayerDetails = false, bool parseUnuspported = false)
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100753{
Derek Lambertieb1fce02019-12-10 21:20:10 +0000754 boost::ignore_unused(runtime);
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100755 std::string modelFormat;
756 std::string modelPath;
757 std::string inputNames;
758 std::string inputTensorShapes;
759 std::string inputTensorDataFilePaths;
760 std::string outputNames;
761 std::string inputTypes;
762 std::string outputTypes;
Matteo Martincigh00dda4a2019-08-14 11:42:30 +0100763 std::string dynamicBackendsPath;
Sadik Armagan77086282019-09-02 11:46:28 +0100764 std::string outputTensorFiles;
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100765
766 size_t subgraphId = 0;
767
768 const std::string backendsMessage = std::string("The preferred order of devices to run layers on by default. ")
769 + std::string("Possible choices: ")
770 + armnn::BackendRegistryInstance().GetBackendIdsAsString();
771
772 po::options_description desc("Options");
773 try
774 {
775 desc.add_options()
776 ("model-format,f", po::value(&modelFormat),
777 "armnn-binary, caffe-binary, caffe-text, tflite-binary, onnx-binary, onnx-text, tensorflow-binary or "
778 "tensorflow-text.")
779 ("model-path,m", po::value(&modelPath), "Path to model file, e.g. .armnn, .caffemodel, .prototxt, "
780 ".tflite, .onnx")
781 ("compute,c", po::value<std::vector<armnn::BackendId>>()->multitoken(),
782 backendsMessage.c_str())
Matteo Martincigh00dda4a2019-08-14 11:42:30 +0100783 ("dynamic-backends-path,b", po::value(&dynamicBackendsPath),
784 "Path where to load any available dynamic backend from. "
785 "If left empty (the default), dynamic backends will not be used.")
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100786 ("input-name,i", po::value(&inputNames), "Identifier of the input tensors in the network separated by comma.")
787 ("subgraph-number,n", po::value<size_t>(&subgraphId)->default_value(0), "Id of the subgraph to be "
788 "executed. Defaults to 0.")
789 ("input-tensor-shape,s", po::value(&inputTensorShapes),
790 "The shape of the input tensors in the network as a flat array of integers separated by comma. "
791 "Several shapes can be passed separating them by semicolon. "
792 "This parameter is optional, depending on the network.")
Aron Virginas-Tarc82c8732019-10-24 17:07:43 +0100793 ("input-tensor-data,d", po::value(&inputTensorDataFilePaths)->default_value(""),
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100794 "Path to files containing the input data as a flat array separated by whitespace. "
Aron Virginas-Tarc82c8732019-10-24 17:07:43 +0100795 "Several paths can be passed separating them by comma. If not specified, the network will be run with dummy "
796 "data (useful for profiling).")
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100797 ("input-type,y",po::value(&inputTypes), "The type of the input tensors in the network separated by comma. "
798 "If unset, defaults to \"float\" for all defined inputs. "
799 "Accepted values (float, int or qasymm8).")
Narumol Prangnawarat610256f2019-06-26 15:10:46 +0100800 ("quantize-input,q",po::bool_switch()->default_value(false),
801 "If this option is enabled, all float inputs will be quantized to qasymm8. "
802 "If unset, default to not quantized. "
803 "Accepted values (true or false)")
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100804 ("output-type,z",po::value(&outputTypes), "The type of the output tensors in the network separated by comma. "
805 "If unset, defaults to \"float\" for all defined outputs. "
806 "Accepted values (float, int or qasymm8).")
807 ("output-name,o", po::value(&outputNames),
Sadik Armagan77086282019-09-02 11:46:28 +0100808 "Identifier of the output tensors in the network separated by comma.")
Georgios Pinitas50311ba2020-02-18 13:25:23 +0000809 ("dequantize-output,l",po::bool_switch()->default_value(false),
810 "If this option is enabled, all quantized outputs will be dequantized to float. "
811 "If unset, default to not get dequantized. "
812 "Accepted values (true or false)")
Sadik Armagan77086282019-09-02 11:46:28 +0100813 ("write-outputs-to-file,w", po::value(&outputTensorFiles),
814 "Comma-separated list of output file paths keyed with the binding-id of the output slot. "
815 "If left empty (the default), the output tensors will not be written to a file.");
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100816 }
817 catch (const std::exception& e)
818 {
819 // Coverity points out that default_value(...) can throw a bad_lexical_cast,
820 // and that desc.add_options() can throw boost::io::too_few_args.
821 // They really won't in any of these cases.
822 BOOST_ASSERT_MSG(false, "Caught unexpected exception");
Derek Lamberti08446972019-11-26 16:38:31 +0000823 ARMNN_LOG(fatal) << "Fatal internal error: " << e.what();
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100824 return EXIT_FAILURE;
825 }
826
827 std::vector<const char*> clOptions;
828 clOptions.reserve(csvRow.values.size());
829 for (const std::string& value : csvRow.values)
830 {
831 clOptions.push_back(value.c_str());
832 }
833
834 po::variables_map vm;
835 try
836 {
837 po::store(po::parse_command_line(static_cast<int>(clOptions.size()), clOptions.data(), desc), vm);
838
839 po::notify(vm);
840
841 CheckOptionDependencies(vm);
842 }
843 catch (const po::error& e)
844 {
845 std::cerr << e.what() << std::endl << std::endl;
846 std::cerr << desc << std::endl;
847 return EXIT_FAILURE;
848 }
849
Narumol Prangnawarat610256f2019-06-26 15:10:46 +0100850 // Get the value of the switch arguments.
851 bool quantizeInput = vm["quantize-input"].as<bool>();
Georgios Pinitas50311ba2020-02-18 13:25:23 +0000852 bool dequantizeOutput = vm["dequantize-output"].as<bool>();
Narumol Prangnawarat610256f2019-06-26 15:10:46 +0100853
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100854 // Get the preferred order of compute devices.
855 std::vector<armnn::BackendId> computeDevices = vm["compute"].as<std::vector<armnn::BackendId>>();
856
857 // Remove duplicates from the list of compute devices.
858 RemoveDuplicateDevices(computeDevices);
859
860 // Check that the specified compute devices are valid.
861 std::string invalidBackends;
862 if (!CheckRequestedBackendsAreValid(computeDevices, armnn::Optional<std::string&>(invalidBackends)))
863 {
Derek Lamberti08446972019-11-26 16:38:31 +0000864 ARMNN_LOG(fatal) << "The list of preferred devices contains invalid backend IDs: "
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100865 << invalidBackends;
866 return EXIT_FAILURE;
867 }
868
Matteo Martincigh00dda4a2019-08-14 11:42:30 +0100869 return RunTest(modelFormat, inputTensorShapes, computeDevices, dynamicBackendsPath, modelPath, inputNames,
Sadik Armagan77086282019-09-02 11:46:28 +0100870 inputTensorDataFilePaths, inputTypes, quantizeInput, outputTypes, outputNames, outputTensorFiles,
Georgios Pinitas50311ba2020-02-18 13:25:23 +0000871 dequantizeOutput, enableProfiling, enableFp16TurboMode, thresholdTime, printIntermediate, subgraphId,
Derek Lamberti132563c2019-12-02 16:06:40 +0000872 enableLayerDetails, parseUnuspported);
Matteo Martincigh00dda4a2019-08-14 11:42:30 +0100873}