blob: 97cf7c2b52a27380d291514289b8b35a0d25b482 [file] [log] [blame]
Francis Murtaghbee4bc92019-06-18 12:30:37 +01001//
2// Copyright © 2017 Arm Ltd. All rights reserved.
3// SPDX-License-Identifier: MIT
4//
5#include <armnn/ArmNN.hpp>
6#include <armnn/TypesUtils.hpp>
7
8#if defined(ARMNN_SERIALIZER)
9#include "armnnDeserializer/IDeserializer.hpp"
10#endif
11#if defined(ARMNN_CAFFE_PARSER)
12#include "armnnCaffeParser/ICaffeParser.hpp"
13#endif
14#if defined(ARMNN_TF_PARSER)
15#include "armnnTfParser/ITfParser.hpp"
16#endif
17#if defined(ARMNN_TF_LITE_PARSER)
18#include "armnnTfLiteParser/ITfLiteParser.hpp"
19#endif
20#if defined(ARMNN_ONNX_PARSER)
21#include "armnnOnnxParser/IOnnxParser.hpp"
22#endif
23#include "CsvReader.hpp"
24#include "../InferenceTest.hpp"
25
Francis Murtaghbee4bc92019-06-18 12:30:37 +010026#include <Profiling.hpp>
Aron Virginas-Tarc82c8732019-10-24 17:07:43 +010027#include <ResolveType.hpp>
Francis Murtaghbee4bc92019-06-18 12:30:37 +010028
29#include <boost/algorithm/string/trim.hpp>
30#include <boost/algorithm/string/split.hpp>
31#include <boost/algorithm/string/classification.hpp>
32#include <boost/program_options.hpp>
33#include <boost/variant.hpp>
34
35#include <iostream>
36#include <fstream>
37#include <functional>
38#include <future>
39#include <algorithm>
40#include <iterator>
41
42namespace
43{
44
45// Configure boost::program_options for command-line parsing and validation.
46namespace po = boost::program_options;
47
48template<typename T, typename TParseElementFunc>
49std::vector<T> ParseArrayImpl(std::istream& stream, TParseElementFunc parseElementFunc, const char * chars = "\t ,:")
50{
51 std::vector<T> result;
52 // Processes line-by-line.
53 std::string line;
54 while (std::getline(stream, line))
55 {
56 std::vector<std::string> tokens;
57 try
58 {
59 // Coverity fix: boost::split() may throw an exception of type boost::bad_function_call.
60 boost::split(tokens, line, boost::algorithm::is_any_of(chars), boost::token_compress_on);
61 }
62 catch (const std::exception& e)
63 {
Derek Lamberti08446972019-11-26 16:38:31 +000064 ARMNN_LOG(error) << "An error occurred when splitting tokens: " << e.what();
Francis Murtaghbee4bc92019-06-18 12:30:37 +010065 continue;
66 }
67 for (const std::string& token : tokens)
68 {
69 if (!token.empty()) // See https://stackoverflow.com/questions/10437406/
70 {
71 try
72 {
73 result.push_back(parseElementFunc(token));
74 }
75 catch (const std::exception&)
76 {
Derek Lamberti08446972019-11-26 16:38:31 +000077 ARMNN_LOG(error) << "'" << token << "' is not a valid number. It has been ignored.";
Francis Murtaghbee4bc92019-06-18 12:30:37 +010078 }
79 }
80 }
81 }
82
83 return result;
84}
85
86bool CheckOption(const po::variables_map& vm,
87 const char* option)
88{
89 // Check that the given option is valid.
90 if (option == nullptr)
91 {
92 return false;
93 }
94
95 // Check whether 'option' is provided.
96 return vm.find(option) != vm.end();
97}
98
99void CheckOptionDependency(const po::variables_map& vm,
100 const char* option,
101 const char* required)
102{
103 // Check that the given options are valid.
104 if (option == nullptr || required == nullptr)
105 {
106 throw po::error("Invalid option to check dependency for");
107 }
108
109 // Check that if 'option' is provided, 'required' is also provided.
110 if (CheckOption(vm, option) && !vm[option].defaulted())
111 {
112 if (CheckOption(vm, required) == 0 || vm[required].defaulted())
113 {
114 throw po::error(std::string("Option '") + option + "' requires option '" + required + "'.");
115 }
116 }
117}
118
119void CheckOptionDependencies(const po::variables_map& vm)
120{
121 CheckOptionDependency(vm, "model-path", "model-format");
122 CheckOptionDependency(vm, "model-path", "input-name");
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100123 CheckOptionDependency(vm, "model-path", "output-name");
124 CheckOptionDependency(vm, "input-tensor-shape", "model-path");
125}
126
// ParseDataArray: family of explicit specializations that read a stream of
// text numbers into a typed vector, one specialization per armnn::DataType.

// Primary declaration for non-quantized types (no scale/offset needed).
template<armnn::DataType NonQuantizedType>
auto ParseDataArray(std::istream & stream);

// Primary declaration for quantized types: floats read from the stream are
// quantized on the fly using the supplied scale and offset.
template<armnn::DataType QuantizedType>
auto ParseDataArray(std::istream& stream,
                    const float& quantizationScale,
                    const int32_t& quantizationOffset);

// Float32: parse each token with std::stof.
template<>
auto ParseDataArray<armnn::DataType::Float32>(std::istream & stream)
{
    return ParseArrayImpl<float>(stream, [](const std::string& s) { return std::stof(s); });
}

// Signed32: parse each token with std::stoi.
template<>
auto ParseDataArray<armnn::DataType::Signed32>(std::istream & stream)
{
    return ParseArrayImpl<int>(stream, [](const std::string & s) { return std::stoi(s); });
}

// QAsymmU8, already-quantized input: tokens are integers narrowed to uint8_t.
// boost::numeric_cast throws if the value does not fit in a uint8_t.
template<>
auto ParseDataArray<armnn::DataType::QAsymmU8>(std::istream& stream)
{
    return ParseArrayImpl<uint8_t>(stream,
                                   [](const std::string& s) { return boost::numeric_cast<uint8_t>(std::stoi(s)); });
}

// QAsymmU8, float input: tokens are floats which are quantized to uint8_t
// using the given scale/offset before being stored.
template<>
auto ParseDataArray<armnn::DataType::QAsymmU8>(std::istream& stream,
                                               const float& quantizationScale,
                                               const int32_t& quantizationOffset)
{
    return ParseArrayImpl<uint8_t>(stream,
                                   [&quantizationScale, &quantizationOffset](const std::string & s)
                                   {
                                       return boost::numeric_cast<uint8_t>(
                                           armnn::Quantize<uint8_t>(std::stof(s),
                                                                    quantizationScale,
                                                                    quantizationOffset));
                                   });
}
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100168std::vector<unsigned int> ParseArray(std::istream& stream)
169{
170 return ParseArrayImpl<unsigned int>(stream,
171 [](const std::string& s) { return boost::numeric_cast<unsigned int>(std::stoi(s)); });
172}
173
174std::vector<std::string> ParseStringList(const std::string & inputString, const char * delimiter)
175{
176 std::stringstream stream(inputString);
177 return ParseArrayImpl<std::string>(stream, [](const std::string& s) { return boost::trim_copy(s); }, delimiter);
178}
179
180void RemoveDuplicateDevices(std::vector<armnn::BackendId>& computeDevices)
181{
182 // Mark the duplicate devices as 'Undefined'.
183 for (auto i = computeDevices.begin(); i != computeDevices.end(); ++i)
184 {
185 for (auto j = std::next(i); j != computeDevices.end(); ++j)
186 {
187 if (*j == *i)
188 {
189 *j = armnn::Compute::Undefined;
190 }
191 }
192 }
193
194 // Remove 'Undefined' devices.
195 computeDevices.erase(std::remove(computeDevices.begin(), computeDevices.end(), armnn::Compute::Undefined),
196 computeDevices.end());
197}
198
// Visitor over the TContainer variant that prints an output tensor to stdout
// and, when a file path was supplied, also writes the values to that file.
// uint8 tensors are dequantized (using the binding's scale/offset) before
// printing/writing.
struct TensorPrinter : public boost::static_visitor<>
{
    // binding:           name of the output binding, printed as a prefix.
    // info:              tensor info; only quantization scale/offset are kept.
    // outputTensorFile:  destination file path; empty string disables file output.
    TensorPrinter(const std::string& binding, const armnn::TensorInfo& info, const std::string& outputTensorFile)
        : m_OutputBinding(binding)
        , m_Scale(info.GetQuantizationScale())
        , m_Offset(info.GetQuantizationOffset())
        , m_OutputTensorFile(outputTensorFile)
    {}

    // Float tensors: print raw values.
    void operator()(const std::vector<float>& values)
    {
        ForEachValue(values, [](float value)
        {
            printf("%f ", value);
        });
        WriteToFile(values);
    }

    // Quantized uint8 tensors: dequantize each value, print it, and collect
    // the dequantized floats so the file receives floats too.
    void operator()(const std::vector<uint8_t>& values)
    {
        auto& scale = m_Scale;
        auto& offset = m_Offset;
        std::vector<float> dequantizedValues;
        ForEachValue(values, [&scale, &offset, &dequantizedValues](uint8_t value)
        {
            auto dequantizedValue = armnn::Dequantize(value, scale, offset);
            printf("%f ", dequantizedValue);
            dequantizedValues.push_back(dequantizedValue);
        });
        WriteToFile(dequantizedValues);
    }

    // Int tensors: print raw values.
    void operator()(const std::vector<int>& values)
    {
        ForEachValue(values, [](int value)
        {
            printf("%d ", value);
        });
        WriteToFile(values);
    }

private:
    // Prints "<binding>: " then applies 'delegate' to each element, ending
    // with a newline. Mixes std::cout and printf; both target stdout.
    template<typename Container, typename Delegate>
    void ForEachValue(const Container& c, Delegate delegate)
    {
        std::cout << m_OutputBinding << ": ";
        for (const auto& value : c)
        {
            delegate(value);
        }
        printf("\n");
    }

    // Writes "<binding>: v0 v1 ..." to m_OutputTensorFile, truncating any
    // existing content. A failure to open is logged but not fatal.
    template<typename T>
    void WriteToFile(const std::vector<T>& values)
    {
        if (!m_OutputTensorFile.empty())
        {
            std::ofstream outputTensorFile;
            outputTensorFile.open(m_OutputTensorFile, std::ofstream::out | std::ofstream::trunc);
            if (outputTensorFile.is_open())
            {
                outputTensorFile << m_OutputBinding << ": ";
                std::copy(values.begin(), values.end(), std::ostream_iterator<T>(outputTensorFile, " "));
            }
            else
            {
                ARMNN_LOG(info) << "Output Tensor File: " << m_OutputTensorFile << " could not be opened!";
            }
            // close() on a never-opened stream is a harmless no-op.
            outputTensorFile.close();
        }
    }

    std::string m_OutputBinding;   // Output binding name used as the print prefix.
    float m_Scale=0.0f;            // Quantization scale for uint8 dequantization.
    int m_Offset=0;                // Quantization offset for uint8 dequantization.
    std::string m_OutputTensorFile; // Optional destination file; empty = stdout only.
};
277
278
Aron Virginas-Tarc82c8732019-10-24 17:07:43 +0100279
280template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
281std::vector<T> GenerateDummyTensorData(unsigned int numElements)
282{
283 return std::vector<T>(numElements, static_cast<T>(0));
284}
285
// Variant over the three supported tensor element buffers.
using TContainer = boost::variant<std::vector<float>, std::vector<int>, std::vector<unsigned char>>;
// (scale, offset) pair for quantized inputs.
using QuantizationParams = std::pair<float, int32_t>;

// Fills 'tensorData' either from a data file (when 'dataFile' holds a
// non-empty path) or with numElements zeros. 'dataTypeStr' selects the
// element type ("float", "int" or "qasymm8"); when 'qParams' is set and the
// type is "float", the float data is quantized to uint8 on the fly.
// Throws armnn::Exception on an unsupported data type string.
void PopulateTensorWithData(TContainer& tensorData,
                            unsigned int numElements,
                            const std::string& dataTypeStr,
                            const armnn::Optional<QuantizationParams>& qParams,
                            const armnn::Optional<std::string>& dataFile)
{
    const bool readFromFile = dataFile.has_value() && !dataFile.value().empty();
    const bool quantizeData = qParams.has_value();

    std::ifstream inputTensorFile;
    if (readFromFile)
    {
        inputTensorFile = std::ifstream(dataFile.value());
    }

    if (dataTypeStr.compare("float") == 0)
    {
        if (quantizeData)
        {
            // Float source data quantized to QAsymmU8 using the model's params.
            const float qScale = qParams.value().first;
            const int qOffset = qParams.value().second;

            tensorData = readFromFile ?
                ParseDataArray<armnn::DataType::QAsymmU8>(inputTensorFile, qScale, qOffset) :
                GenerateDummyTensorData<armnn::DataType::QAsymmU8>(numElements);
        }
        else
        {
            tensorData = readFromFile ?
                ParseDataArray<armnn::DataType::Float32>(inputTensorFile) :
                GenerateDummyTensorData<armnn::DataType::Float32>(numElements);
        }
    }
    else if (dataTypeStr.compare("int") == 0)
    {
        tensorData = readFromFile ?
            ParseDataArray<armnn::DataType::Signed32>(inputTensorFile) :
            GenerateDummyTensorData<armnn::DataType::Signed32>(numElements);
    }
    else if (dataTypeStr.compare("qasymm8") == 0)
    {
        tensorData = readFromFile ?
            ParseDataArray<armnn::DataType::QAsymmU8>(inputTensorFile) :
            GenerateDummyTensorData<armnn::DataType::QAsymmU8>(numElements);
    }
    else
    {
        std::string errorMessage = "Unsupported tensor data type " + dataTypeStr;
        ARMNN_LOG(fatal) << errorMessage;

        inputTensorFile.close();
        throw armnn::Exception(errorMessage);
    }

    // close() is a harmless no-op when the file was never opened.
    inputTensorFile.close();
}
345
346} // anonymous namespace
347
// NOTE(review): file-scope flag that appears unused in the visible code —
// RunTest derives its own m_GenerateTensorData; confirm before removing.
bool generateTensorData = true;
349
// Aggregated, already-validated command-line parameters passed to MainImpl.
struct ExecuteNetworkParams
{
    using TensorShapePtr = std::unique_ptr<armnn::TensorShape>;

    const char* m_ModelPath;                        // Path to the model file.
    bool m_IsModelBinary;                           // True for binary model formats, false for text.
    std::vector<armnn::BackendId> m_ComputeDevices; // Preferred backend order (duplicates removed).
    std::string m_DynamicBackendsPath;              // Search path for dynamically loaded backends.
    std::vector<string> m_InputNames;               // Input binding names.
    std::vector<TensorShapePtr> m_InputTensorShapes; // Optional per-input shape overrides.
    std::vector<string> m_InputTensorDataFilePaths; // Per-input data files; empty => dummy data.
    std::vector<string> m_InputTypes;               // Per-input type strings ("float"/"int"/"qasymm8").
    bool m_QuantizeInput;                           // Quantize float input data to uint8.
    std::vector<string> m_OutputTypes;              // Per-output type strings.
    std::vector<string> m_OutputNames;              // Output binding names.
    std::vector<string> m_OutputTensorFiles;        // Optional per-output destination files.
    bool m_EnableProfiling;                         // Enable the ArmNN profiler.
    bool m_EnableFp16TurboMode;                     // Allow FP32->FP16 optimization.
    double m_ThresholdTime;                         // Max acceptable inference time in ms (0 = unset).
    bool m_PrintIntermediate;                       // Print intermediate layer outputs.
    size_t m_SubgraphId;                            // Subgraph to execute (parser-dependent).
    bool m_EnableLayerDetails = false;              // Visualize the post-optimization model.
    bool m_GenerateTensorData;                      // True when inputs are filled with zeros.
    bool m_ParseUnsupported = false;                // Parse unsupported layers as stand-ins.
};
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100375
// Loads the model with the parser TParser, populates input tensors, runs a
// single inference and prints (and optionally writes) the outputs.
// Returns EXIT_SUCCESS/EXIT_FAILURE; armnn::Exception is caught and reported.
template<typename TParser, typename TDataType>
int MainImpl(const ExecuteNetworkParams& params,
             const std::shared_ptr<armnn::IRuntime>& runtime = nullptr)
{
    using TContainer = boost::variant<std::vector<float>, std::vector<int>, std::vector<unsigned char>>;

    std::vector<TContainer> inputDataContainers;

    try
    {
        // Creates an InferenceModel, which will parse the model and load it into an IRuntime.
        typename InferenceModel<TParser, TDataType>::Params inferenceModelParams;
        inferenceModelParams.m_ModelPath = params.m_ModelPath;
        inferenceModelParams.m_IsModelBinary = params.m_IsModelBinary;
        inferenceModelParams.m_ComputeDevices = params.m_ComputeDevices;
        inferenceModelParams.m_DynamicBackendsPath = params.m_DynamicBackendsPath;
        inferenceModelParams.m_PrintIntermediateLayers = params.m_PrintIntermediate;
        inferenceModelParams.m_VisualizePostOptimizationModel = params.m_EnableLayerDetails;
        inferenceModelParams.m_ParseUnsupported = params.m_ParseUnsupported;

        for(const std::string& inputName: params.m_InputNames)
        {
            inferenceModelParams.m_InputBindings.push_back(inputName);
        }

        for(unsigned int i = 0; i < params.m_InputTensorShapes.size(); ++i)
        {
            inferenceModelParams.m_InputShapes.push_back(*params.m_InputTensorShapes[i]);
        }

        for(const std::string& outputName: params.m_OutputNames)
        {
            inferenceModelParams.m_OutputBindings.push_back(outputName);
        }

        inferenceModelParams.m_SubgraphId = params.m_SubgraphId;
        inferenceModelParams.m_EnableFp16TurboMode = params.m_EnableFp16TurboMode;

        InferenceModel<TParser, TDataType> model(inferenceModelParams,
                                                 params.m_EnableProfiling,
                                                 params.m_DynamicBackendsPath,
                                                 runtime);

        // Build one input container per binding, from file or dummy data.
        const size_t numInputs = inferenceModelParams.m_InputBindings.size();
        for(unsigned int i = 0; i < numInputs; ++i)
        {
            armnn::Optional<QuantizationParams> qParams = params.m_QuantizeInput ?
                armnn::MakeOptional<QuantizationParams>(model.GetInputQuantizationParams()) :
                armnn::EmptyOptional();

            armnn::Optional<std::string> dataFile = params.m_GenerateTensorData ?
                armnn::EmptyOptional() :
                armnn::MakeOptional<std::string>(params.m_InputTensorDataFilePaths[i]);

            unsigned int numElements = model.GetInputSize(i);
            if (params.m_InputTensorShapes.size() > i && params.m_InputTensorShapes[i])
            {
                // If the user has provided a tensor shape for the current input,
                // override numElements
                numElements = params.m_InputTensorShapes[i]->GetNumElements();
            }

            TContainer tensorData;
            PopulateTensorWithData(tensorData,
                                   numElements,
                                   params.m_InputTypes[i],
                                   qParams,
                                   dataFile);

            inputDataContainers.push_back(tensorData);
        }

        // Pre-size one output container per binding, typed per m_OutputTypes.
        const size_t numOutputs = inferenceModelParams.m_OutputBindings.size();
        std::vector<TContainer> outputDataContainers;

        for (unsigned int i = 0; i < numOutputs; ++i)
        {
            if (params.m_OutputTypes[i].compare("float") == 0)
            {
                outputDataContainers.push_back(std::vector<float>(model.GetOutputSize(i)));
            }
            else if (params.m_OutputTypes[i].compare("int") == 0)
            {
                outputDataContainers.push_back(std::vector<int>(model.GetOutputSize(i)));
            }
            else if (params.m_OutputTypes[i].compare("qasymm8") == 0)
            {
                outputDataContainers.push_back(std::vector<uint8_t>(model.GetOutputSize(i)));
            }
            else
            {
                ARMNN_LOG(fatal) << "Unsupported tensor data type \"" << params.m_OutputTypes[i] << "\". ";
                return EXIT_FAILURE;
            }
        }

        // model.Run returns the inference time elapsed in EnqueueWorkload (in milliseconds)
        auto inference_duration = model.Run(inputDataContainers, outputDataContainers);

        if (params.m_GenerateTensorData)
        {
            ARMNN_LOG(warning) << "The input data was generated, note that the output will not be useful";
        }

        // Print output tensors
        const auto& infosOut = model.GetOutputBindingInfos();
        for (size_t i = 0; i < numOutputs; i++)
        {
            const armnn::TensorInfo& infoOut = infosOut[i].second;
            auto outputTensorFile = params.m_OutputTensorFiles.empty() ? "" : params.m_OutputTensorFiles[i];

            TensorPrinter printer(inferenceModelParams.m_OutputBindings[i], infoOut, outputTensorFile);
            boost::apply_visitor(printer, outputDataContainers[i]);
        }

        ARMNN_LOG(info) << "\nInference time: " << std::setprecision(2)
                        << std::fixed << inference_duration.count() << " ms";

        // If thresholdTime == 0.0 (default), then it hasn't been supplied at command line
        if (params.m_ThresholdTime != 0.0)
        {
            ARMNN_LOG(info) << "Threshold time: " << std::setprecision(2)
                            << std::fixed << params.m_ThresholdTime << " ms";
            auto thresholdMinusInference = params.m_ThresholdTime - inference_duration.count();
            ARMNN_LOG(info) << "Threshold time - Inference time: " << std::setprecision(2)
                            << std::fixed << thresholdMinusInference << " ms" << "\n";

            if (thresholdMinusInference < 0)
            {
                // Logged as fatal but execution still returns EXIT_SUCCESS here.
                std::string errorMessage = "Elapsed inference time is greater than provided threshold time.";
                ARMNN_LOG(fatal) << errorMessage;
            }
        }
    }
    catch (armnn::Exception const& e)
    {
        ARMNN_LOG(fatal) << "Armnn Error: " << e.what();
        return EXIT_FAILURE;
    }

    return EXIT_SUCCESS;
}
518
// This will run a test
//
// Validates and normalizes the raw command-line strings, builds an
// ExecuteNetworkParams, then dispatches to MainImpl with the parser type
// selected by 'format'. Returns EXIT_SUCCESS/EXIT_FAILURE.
int RunTest(const std::string& format,
            const std::string& inputTensorShapesStr,
            const vector<armnn::BackendId>& computeDevices,
            const std::string& dynamicBackendsPath,
            const std::string& path,
            const std::string& inputNames,
            const std::string& inputTensorDataFilePaths,
            const std::string& inputTypes,
            bool quantizeInput,
            const std::string& outputTypes,
            const std::string& outputNames,
            const std::string& outputTensorFiles,
            bool enableProfiling,
            bool enableFp16TurboMode,
            const double& thresholdTime,
            bool printIntermediate,
            const size_t subgraphId,
            bool enableLayerDetails = false,
            bool parseUnsupported = false,
            const std::shared_ptr<armnn::IRuntime>& runtime = nullptr)
{
    // Split the comma/colon separated option strings into vectors.
    std::string modelFormat = boost::trim_copy(format);
    std::string modelPath = boost::trim_copy(path);
    std::vector<std::string> inputNamesVector = ParseStringList(inputNames, ",");
    std::vector<std::string> inputTensorShapesVector = ParseStringList(inputTensorShapesStr, ":");
    std::vector<std::string> inputTensorDataFilePathsVector = ParseStringList(
        inputTensorDataFilePaths, ",");
    std::vector<std::string> outputNamesVector = ParseStringList(outputNames, ",");
    std::vector<std::string> inputTypesVector = ParseStringList(inputTypes, ",");
    std::vector<std::string> outputTypesVector = ParseStringList(outputTypes, ",");
    std::vector<std::string> outputTensorFilesVector = ParseStringList(outputTensorFiles, ",");

    // Parse model binary flag from the model-format string we got from the command-line
    bool isModelBinary;
    if (modelFormat.find("bin") != std::string::npos)
    {
        isModelBinary = true;
    }
    else if (modelFormat.find("txt") != std::string::npos || modelFormat.find("text") != std::string::npos)
    {
        isModelBinary = false;
    }
    else
    {
        ARMNN_LOG(fatal) << "Unknown model format: '" << modelFormat << "'. Please include 'binary' or 'text'";
        return EXIT_FAILURE;
    }

    // Per-input/per-output option lists must line up with the name lists.
    if ((inputTensorShapesVector.size() != 0) && (inputTensorShapesVector.size() != inputNamesVector.size()))
    {
        ARMNN_LOG(fatal) << "input-name and input-tensor-shape must have the same amount of elements.";
        return EXIT_FAILURE;
    }

    if ((inputTensorDataFilePathsVector.size() != 0) &&
        (inputTensorDataFilePathsVector.size() != inputNamesVector.size()))
    {
        ARMNN_LOG(fatal) << "input-name and input-tensor-data must have the same amount of elements.";
        return EXIT_FAILURE;
    }

    if ((outputTensorFilesVector.size() != 0) &&
        (outputTensorFilesVector.size() != outputNamesVector.size()))
    {
        ARMNN_LOG(fatal) << "output-name and write-outputs-to-file must have the same amount of elements.";
        return EXIT_FAILURE;
    }

    if (inputTypesVector.size() == 0)
    {
        //Defaults the value of all inputs to "float"
        inputTypesVector.assign(inputNamesVector.size(), "float");
    }
    else if ((inputTypesVector.size() != 0) && (inputTypesVector.size() != inputNamesVector.size()))
    {
        ARMNN_LOG(fatal) << "input-name and input-type must have the same amount of elements.";
        return EXIT_FAILURE;
    }

    if (outputTypesVector.size() == 0)
    {
        //Defaults the value of all outputs to "float"
        outputTypesVector.assign(outputNamesVector.size(), "float");
    }
    else if ((outputTypesVector.size() != 0) && (outputTypesVector.size() != outputNamesVector.size()))
    {
        ARMNN_LOG(fatal) << "output-name and output-type must have the same amount of elements.";
        return EXIT_FAILURE;
    }

    // Parse input tensor shape from the string we got from the command-line.
    std::vector<std::unique_ptr<armnn::TensorShape>> inputTensorShapes;

    if (!inputTensorShapesVector.empty())
    {
        inputTensorShapes.reserve(inputTensorShapesVector.size());

        for(const std::string& shape : inputTensorShapesVector)
        {
            std::stringstream ss(shape);
            std::vector<unsigned int> dims = ParseArray(ss);

            try
            {
                // Coverity fix: An exception of type armnn::InvalidArgumentException is thrown and never caught.
                inputTensorShapes.push_back(std::make_unique<armnn::TensorShape>(dims.size(), dims.data()));
            }
            catch (const armnn::InvalidArgumentException& e)
            {
                ARMNN_LOG(fatal) << "Cannot create tensor shape: " << e.what();
                return EXIT_FAILURE;
            }
        }
    }

    // Check that threshold time is not less than zero
    if (thresholdTime < 0)
    {
        ARMNN_LOG(fatal) << "Threshold time supplied as a command line argument is less than zero.";
        return EXIT_FAILURE;
    }

    // Assemble the validated parameter set for MainImpl.
    ExecuteNetworkParams params;
    params.m_ModelPath = modelPath.c_str();
    params.m_IsModelBinary = isModelBinary;
    params.m_ComputeDevices = computeDevices;
    params.m_DynamicBackendsPath = dynamicBackendsPath;
    params.m_InputNames = inputNamesVector;
    params.m_InputTensorShapes = std::move(inputTensorShapes);
    params.m_InputTensorDataFilePaths = inputTensorDataFilePathsVector;
    params.m_InputTypes = inputTypesVector;
    params.m_QuantizeInput = quantizeInput;
    params.m_OutputTypes = outputTypesVector;
    params.m_OutputNames = outputNamesVector;
    params.m_OutputTensorFiles = outputTensorFilesVector;
    params.m_EnableProfiling = enableProfiling;
    params.m_EnableFp16TurboMode = enableFp16TurboMode;
    params.m_ThresholdTime = thresholdTime;
    params.m_PrintIntermediate = printIntermediate;
    params.m_SubgraphId = subgraphId;
    params.m_EnableLayerDetails = enableLayerDetails;
    params.m_GenerateTensorData = inputTensorDataFilePathsVector.empty();
    params.m_ParseUnsupported = parseUnsupported;

    // Warn if ExecuteNetwork will generate dummy input data
    if (params.m_GenerateTensorData)
    {
        ARMNN_LOG(warning) << "No input files provided, input tensors will be filled with 0s.";
    }

    // Forward to implementation based on the parser type
    if (modelFormat.find("armnn") != std::string::npos)
    {
#if defined(ARMNN_SERIALIZER)
    return MainImpl<armnnDeserializer::IDeserializer, float>(params, runtime);
#else
    ARMNN_LOG(fatal) << "Not built with serialization support.";
    return EXIT_FAILURE;
#endif
    }
    else if (modelFormat.find("caffe") != std::string::npos)
    {
#if defined(ARMNN_CAFFE_PARSER)
    return MainImpl<armnnCaffeParser::ICaffeParser, float>(params, runtime);
#else
    ARMNN_LOG(fatal) << "Not built with Caffe parser support.";
    return EXIT_FAILURE;
#endif
    }
    else if (modelFormat.find("onnx") != std::string::npos)
{
#if defined(ARMNN_ONNX_PARSER)
    return MainImpl<armnnOnnxParser::IOnnxParser, float>(params, runtime);
#else
    ARMNN_LOG(fatal) << "Not built with Onnx parser support.";
    return EXIT_FAILURE;
#endif
    }
    else if (modelFormat.find("tensorflow") != std::string::npos)
    {
#if defined(ARMNN_TF_PARSER)
    return MainImpl<armnnTfParser::ITfParser, float>(params, runtime);
#else
    ARMNN_LOG(fatal) << "Not built with Tensorflow parser support.";
    return EXIT_FAILURE;
#endif
    }
    else if(modelFormat.find("tflite") != std::string::npos)
    {
#if defined(ARMNN_TF_LITE_PARSER)
    if (! isModelBinary)
    {
        ARMNN_LOG(fatal) << "Unknown model format: '" << modelFormat << "'. Only 'binary' format supported \
  for tflite files";
        return EXIT_FAILURE;
    }
    return MainImpl<armnnTfLiteParser::ITfLiteParser, float>(params, runtime);
#else
    ARMNN_LOG(fatal) << "Unknown model format: '" << modelFormat <<
        "'. Please include 'caffe', 'tensorflow', 'tflite' or 'onnx'";
    return EXIT_FAILURE;
#endif
    }
    else
    {
        ARMNN_LOG(fatal) << "Unknown model format: '" << modelFormat <<
                            "'. Please include 'caffe', 'tensorflow', 'tflite' or 'onnx'";
        return EXIT_FAILURE;
    }
}
730
731int RunCsvTest(const armnnUtils::CsvRow &csvRow, const std::shared_ptr<armnn::IRuntime>& runtime,
Matthew Jackson54658b92019-08-27 15:35:59 +0100732 const bool enableProfiling, const bool enableFp16TurboMode, const double& thresholdTime,
Derek Lamberti132563c2019-12-02 16:06:40 +0000733 const bool printIntermediate, bool enableLayerDetails = false, bool parseUnuspported = false)
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100734{
Derek Lambertieb1fce02019-12-10 21:20:10 +0000735 boost::ignore_unused(runtime);
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100736 std::string modelFormat;
737 std::string modelPath;
738 std::string inputNames;
739 std::string inputTensorShapes;
740 std::string inputTensorDataFilePaths;
741 std::string outputNames;
742 std::string inputTypes;
743 std::string outputTypes;
Matteo Martincigh00dda4a2019-08-14 11:42:30 +0100744 std::string dynamicBackendsPath;
Sadik Armagan77086282019-09-02 11:46:28 +0100745 std::string outputTensorFiles;
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100746
747 size_t subgraphId = 0;
748
749 const std::string backendsMessage = std::string("The preferred order of devices to run layers on by default. ")
750 + std::string("Possible choices: ")
751 + armnn::BackendRegistryInstance().GetBackendIdsAsString();
752
753 po::options_description desc("Options");
754 try
755 {
756 desc.add_options()
757 ("model-format,f", po::value(&modelFormat),
758 "armnn-binary, caffe-binary, caffe-text, tflite-binary, onnx-binary, onnx-text, tensorflow-binary or "
759 "tensorflow-text.")
760 ("model-path,m", po::value(&modelPath), "Path to model file, e.g. .armnn, .caffemodel, .prototxt, "
761 ".tflite, .onnx")
762 ("compute,c", po::value<std::vector<armnn::BackendId>>()->multitoken(),
763 backendsMessage.c_str())
Matteo Martincigh00dda4a2019-08-14 11:42:30 +0100764 ("dynamic-backends-path,b", po::value(&dynamicBackendsPath),
765 "Path where to load any available dynamic backend from. "
766 "If left empty (the default), dynamic backends will not be used.")
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100767 ("input-name,i", po::value(&inputNames), "Identifier of the input tensors in the network separated by comma.")
768 ("subgraph-number,n", po::value<size_t>(&subgraphId)->default_value(0), "Id of the subgraph to be "
769 "executed. Defaults to 0.")
770 ("input-tensor-shape,s", po::value(&inputTensorShapes),
771 "The shape of the input tensors in the network as a flat array of integers separated by comma. "
772 "Several shapes can be passed separating them by semicolon. "
773 "This parameter is optional, depending on the network.")
Aron Virginas-Tarc82c8732019-10-24 17:07:43 +0100774 ("input-tensor-data,d", po::value(&inputTensorDataFilePaths)->default_value(""),
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100775 "Path to files containing the input data as a flat array separated by whitespace. "
Aron Virginas-Tarc82c8732019-10-24 17:07:43 +0100776 "Several paths can be passed separating them by comma. If not specified, the network will be run with dummy "
777 "data (useful for profiling).")
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100778 ("input-type,y",po::value(&inputTypes), "The type of the input tensors in the network separated by comma. "
779 "If unset, defaults to \"float\" for all defined inputs. "
780 "Accepted values (float, int or qasymm8).")
Narumol Prangnawarat610256f2019-06-26 15:10:46 +0100781 ("quantize-input,q",po::bool_switch()->default_value(false),
782 "If this option is enabled, all float inputs will be quantized to qasymm8. "
783 "If unset, default to not quantized. "
784 "Accepted values (true or false)")
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100785 ("output-type,z",po::value(&outputTypes), "The type of the output tensors in the network separated by comma. "
786 "If unset, defaults to \"float\" for all defined outputs. "
787 "Accepted values (float, int or qasymm8).")
788 ("output-name,o", po::value(&outputNames),
Sadik Armagan77086282019-09-02 11:46:28 +0100789 "Identifier of the output tensors in the network separated by comma.")
790 ("write-outputs-to-file,w", po::value(&outputTensorFiles),
791 "Comma-separated list of output file paths keyed with the binding-id of the output slot. "
792 "If left empty (the default), the output tensors will not be written to a file.");
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100793 }
794 catch (const std::exception& e)
795 {
796 // Coverity points out that default_value(...) can throw a bad_lexical_cast,
797 // and that desc.add_options() can throw boost::io::too_few_args.
798 // They really won't in any of these cases.
799 BOOST_ASSERT_MSG(false, "Caught unexpected exception");
Derek Lamberti08446972019-11-26 16:38:31 +0000800 ARMNN_LOG(fatal) << "Fatal internal error: " << e.what();
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100801 return EXIT_FAILURE;
802 }
803
804 std::vector<const char*> clOptions;
805 clOptions.reserve(csvRow.values.size());
806 for (const std::string& value : csvRow.values)
807 {
808 clOptions.push_back(value.c_str());
809 }
810
811 po::variables_map vm;
812 try
813 {
814 po::store(po::parse_command_line(static_cast<int>(clOptions.size()), clOptions.data(), desc), vm);
815
816 po::notify(vm);
817
818 CheckOptionDependencies(vm);
819 }
820 catch (const po::error& e)
821 {
822 std::cerr << e.what() << std::endl << std::endl;
823 std::cerr << desc << std::endl;
824 return EXIT_FAILURE;
825 }
826
Narumol Prangnawarat610256f2019-06-26 15:10:46 +0100827 // Get the value of the switch arguments.
828 bool quantizeInput = vm["quantize-input"].as<bool>();
829
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100830 // Get the preferred order of compute devices.
831 std::vector<armnn::BackendId> computeDevices = vm["compute"].as<std::vector<armnn::BackendId>>();
832
833 // Remove duplicates from the list of compute devices.
834 RemoveDuplicateDevices(computeDevices);
835
836 // Check that the specified compute devices are valid.
837 std::string invalidBackends;
838 if (!CheckRequestedBackendsAreValid(computeDevices, armnn::Optional<std::string&>(invalidBackends)))
839 {
Derek Lamberti08446972019-11-26 16:38:31 +0000840 ARMNN_LOG(fatal) << "The list of preferred devices contains invalid backend IDs: "
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100841 << invalidBackends;
842 return EXIT_FAILURE;
843 }
844
Matteo Martincigh00dda4a2019-08-14 11:42:30 +0100845 return RunTest(modelFormat, inputTensorShapes, computeDevices, dynamicBackendsPath, modelPath, inputNames,
Sadik Armagan77086282019-09-02 11:46:28 +0100846 inputTensorDataFilePaths, inputTypes, quantizeInput, outputTypes, outputNames, outputTensorFiles,
Andre Ghattas23ae2ea2019-08-07 12:18:38 +0100847 enableProfiling, enableFp16TurboMode, thresholdTime, printIntermediate, subgraphId,
Derek Lamberti132563c2019-12-02 16:06:40 +0000848 enableLayerDetails, parseUnuspported);
Matteo Martincigh00dda4a2019-08-14 11:42:30 +0100849}