blob: a922228689194492e4bd2e6a64d6ce5b57808523 [file] [log] [blame]
Francis Murtaghbee4bc92019-06-18 12:30:37 +01001//
2// Copyright © 2017 Arm Ltd. All rights reserved.
3// SPDX-License-Identifier: MIT
4//
5#include <armnn/ArmNN.hpp>
6#include <armnn/TypesUtils.hpp>
7
8#if defined(ARMNN_SERIALIZER)
9#include "armnnDeserializer/IDeserializer.hpp"
10#endif
11#if defined(ARMNN_CAFFE_PARSER)
12#include "armnnCaffeParser/ICaffeParser.hpp"
13#endif
14#if defined(ARMNN_TF_PARSER)
15#include "armnnTfParser/ITfParser.hpp"
16#endif
17#if defined(ARMNN_TF_LITE_PARSER)
18#include "armnnTfLiteParser/ITfLiteParser.hpp"
19#endif
20#if defined(ARMNN_ONNX_PARSER)
21#include "armnnOnnxParser/IOnnxParser.hpp"
22#endif
23#include "CsvReader.hpp"
24#include "../InferenceTest.hpp"
25
Francis Murtaghbee4bc92019-06-18 12:30:37 +010026#include <Profiling.hpp>
Aron Virginas-Tarc82c8732019-10-24 17:07:43 +010027#include <ResolveType.hpp>
Francis Murtaghbee4bc92019-06-18 12:30:37 +010028
Francis Murtaghbee4bc92019-06-18 12:30:37 +010029#include <boost/program_options.hpp>
30#include <boost/variant.hpp>
31
32#include <iostream>
33#include <fstream>
34#include <functional>
35#include <future>
36#include <algorithm>
37#include <iterator>
38
39namespace
40{
41
42// Configure boost::program_options for command-line parsing and validation.
43namespace po = boost::program_options;
44
45template<typename T, typename TParseElementFunc>
46std::vector<T> ParseArrayImpl(std::istream& stream, TParseElementFunc parseElementFunc, const char * chars = "\t ,:")
47{
48 std::vector<T> result;
49 // Processes line-by-line.
50 std::string line;
51 while (std::getline(stream, line))
52 {
David Monahana8837bf2020-04-16 10:01:56 +010053 std::vector<std::string> tokens = armnn::stringUtils::StringTokenizer(line, chars);
Francis Murtaghbee4bc92019-06-18 12:30:37 +010054 for (const std::string& token : tokens)
55 {
56 if (!token.empty()) // See https://stackoverflow.com/questions/10437406/
57 {
58 try
59 {
60 result.push_back(parseElementFunc(token));
61 }
62 catch (const std::exception&)
63 {
Derek Lamberti08446972019-11-26 16:38:31 +000064 ARMNN_LOG(error) << "'" << token << "' is not a valid number. It has been ignored.";
Francis Murtaghbee4bc92019-06-18 12:30:37 +010065 }
66 }
67 }
68 }
69
70 return result;
71}
72
73bool CheckOption(const po::variables_map& vm,
74 const char* option)
75{
76 // Check that the given option is valid.
77 if (option == nullptr)
78 {
79 return false;
80 }
81
82 // Check whether 'option' is provided.
83 return vm.find(option) != vm.end();
84}
85
86void CheckOptionDependency(const po::variables_map& vm,
87 const char* option,
88 const char* required)
89{
90 // Check that the given options are valid.
91 if (option == nullptr || required == nullptr)
92 {
93 throw po::error("Invalid option to check dependency for");
94 }
95
96 // Check that if 'option' is provided, 'required' is also provided.
97 if (CheckOption(vm, option) && !vm[option].defaulted())
98 {
99 if (CheckOption(vm, required) == 0 || vm[required].defaulted())
100 {
101 throw po::error(std::string("Option '") + option + "' requires option '" + required + "'.");
102 }
103 }
104}
105
106void CheckOptionDependencies(const po::variables_map& vm)
107{
108 CheckOptionDependency(vm, "model-path", "model-format");
109 CheckOptionDependency(vm, "model-path", "input-name");
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100110 CheckOptionDependency(vm, "model-path", "output-name");
111 CheckOptionDependency(vm, "input-tensor-shape", "model-path");
112}
113
114template<armnn::DataType NonQuantizedType>
115auto ParseDataArray(std::istream & stream);
116
117template<armnn::DataType QuantizedType>
118auto ParseDataArray(std::istream& stream,
119 const float& quantizationScale,
120 const int32_t& quantizationOffset);
121
122template<>
123auto ParseDataArray<armnn::DataType::Float32>(std::istream & stream)
124{
125 return ParseArrayImpl<float>(stream, [](const std::string& s) { return std::stof(s); });
126}
127
128template<>
129auto ParseDataArray<armnn::DataType::Signed32>(std::istream & stream)
130{
131 return ParseArrayImpl<int>(stream, [](const std::string & s) { return std::stoi(s); });
132}
133
134template<>
Derek Lambertif90c56d2020-01-10 17:14:08 +0000135auto ParseDataArray<armnn::DataType::QAsymmU8>(std::istream& stream)
Narumol Prangnawarat610256f2019-06-26 15:10:46 +0100136{
137 return ParseArrayImpl<uint8_t>(stream,
138 [](const std::string& s) { return boost::numeric_cast<uint8_t>(std::stoi(s)); });
139}
140
141template<>
Derek Lambertif90c56d2020-01-10 17:14:08 +0000142auto ParseDataArray<armnn::DataType::QAsymmU8>(std::istream& stream,
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100143 const float& quantizationScale,
144 const int32_t& quantizationOffset)
145{
146 return ParseArrayImpl<uint8_t>(stream,
147 [&quantizationScale, &quantizationOffset](const std::string & s)
148 {
149 return boost::numeric_cast<uint8_t>(
Rob Hughes93667b12019-09-23 16:24:05 +0100150 armnn::Quantize<uint8_t>(std::stof(s),
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100151 quantizationScale,
152 quantizationOffset));
153 });
154}
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100155std::vector<unsigned int> ParseArray(std::istream& stream)
156{
157 return ParseArrayImpl<unsigned int>(stream,
158 [](const std::string& s) { return boost::numeric_cast<unsigned int>(std::stoi(s)); });
159}
160
161std::vector<std::string> ParseStringList(const std::string & inputString, const char * delimiter)
162{
163 std::stringstream stream(inputString);
David Monahana8837bf2020-04-16 10:01:56 +0100164 return ParseArrayImpl<std::string>(stream, [](const std::string& s) {
165 return armnn::stringUtils::StringTrimCopy(s); }, delimiter);
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100166}
167
168void RemoveDuplicateDevices(std::vector<armnn::BackendId>& computeDevices)
169{
170 // Mark the duplicate devices as 'Undefined'.
171 for (auto i = computeDevices.begin(); i != computeDevices.end(); ++i)
172 {
173 for (auto j = std::next(i); j != computeDevices.end(); ++j)
174 {
175 if (*j == *i)
176 {
177 *j = armnn::Compute::Undefined;
178 }
179 }
180 }
181
182 // Remove 'Undefined' devices.
183 computeDevices.erase(std::remove(computeDevices.begin(), computeDevices.end(), armnn::Compute::Undefined),
184 computeDevices.end());
185}
186
187struct TensorPrinter : public boost::static_visitor<>
188{
Georgios Pinitas50311ba2020-02-18 13:25:23 +0000189 TensorPrinter(const std::string& binding,
190 const armnn::TensorInfo& info,
191 const std::string& outputTensorFile,
192 bool dequantizeOutput)
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100193 : m_OutputBinding(binding)
194 , m_Scale(info.GetQuantizationScale())
195 , m_Offset(info.GetQuantizationOffset())
Sadik Armagan77086282019-09-02 11:46:28 +0100196 , m_OutputTensorFile(outputTensorFile)
Georgios Pinitas50311ba2020-02-18 13:25:23 +0000197 , m_DequantizeOutput(dequantizeOutput)
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100198 {}
199
200 void operator()(const std::vector<float>& values)
201 {
Sadik Armagan77086282019-09-02 11:46:28 +0100202 ForEachValue(values, [](float value)
203 {
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100204 printf("%f ", value);
205 });
Sadik Armagan77086282019-09-02 11:46:28 +0100206 WriteToFile(values);
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100207 }
208
209 void operator()(const std::vector<uint8_t>& values)
210 {
Georgios Pinitas50311ba2020-02-18 13:25:23 +0000211 if(m_DequantizeOutput)
212 {
213 auto& scale = m_Scale;
214 auto& offset = m_Offset;
215 std::vector<float> dequantizedValues;
216 ForEachValue(values, [&scale, &offset, &dequantizedValues](uint8_t value)
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100217 {
Sadik Armagan77086282019-09-02 11:46:28 +0100218 auto dequantizedValue = armnn::Dequantize(value, scale, offset);
219 printf("%f ", dequantizedValue);
220 dequantizedValues.push_back(dequantizedValue);
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100221 });
Georgios Pinitas50311ba2020-02-18 13:25:23 +0000222 WriteToFile(dequantizedValues);
223 }
224 else
225 {
226 const std::vector<int> intValues(values.begin(), values.end());
227 operator()(intValues);
228 }
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100229 }
230
231 void operator()(const std::vector<int>& values)
232 {
233 ForEachValue(values, [](int value)
234 {
235 printf("%d ", value);
236 });
Sadik Armagan77086282019-09-02 11:46:28 +0100237 WriteToFile(values);
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100238 }
239
240private:
241 template<typename Container, typename Delegate>
242 void ForEachValue(const Container& c, Delegate delegate)
243 {
244 std::cout << m_OutputBinding << ": ";
245 for (const auto& value : c)
246 {
247 delegate(value);
248 }
249 printf("\n");
250 }
251
Sadik Armagan77086282019-09-02 11:46:28 +0100252 template<typename T>
253 void WriteToFile(const std::vector<T>& values)
254 {
255 if (!m_OutputTensorFile.empty())
256 {
257 std::ofstream outputTensorFile;
258 outputTensorFile.open(m_OutputTensorFile, std::ofstream::out | std::ofstream::trunc);
259 if (outputTensorFile.is_open())
260 {
261 outputTensorFile << m_OutputBinding << ": ";
262 std::copy(values.begin(), values.end(), std::ostream_iterator<T>(outputTensorFile, " "));
263 }
264 else
265 {
Derek Lamberti08446972019-11-26 16:38:31 +0000266 ARMNN_LOG(info) << "Output Tensor File: " << m_OutputTensorFile << " could not be opened!";
Sadik Armagan77086282019-09-02 11:46:28 +0100267 }
268 outputTensorFile.close();
269 }
270 }
271
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100272 std::string m_OutputBinding;
273 float m_Scale=0.0f;
274 int m_Offset=0;
Sadik Armagan77086282019-09-02 11:46:28 +0100275 std::string m_OutputTensorFile;
Georgios Pinitas50311ba2020-02-18 13:25:23 +0000276 bool m_DequantizeOutput = false;
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100277};
278
279
Aron Virginas-Tarc82c8732019-10-24 17:07:43 +0100280
/// Creates a zero-filled buffer of 'numElements' values of the C++ type that
/// corresponds to 'ArmnnType'. Used when no input data file was supplied.
template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
std::vector<T> GenerateDummyTensorData(unsigned int numElements)
{
    return std::vector<T>(numElements, static_cast<T>(0));
}
286
287using TContainer = boost::variant<std::vector<float>, std::vector<int>, std::vector<unsigned char>>;
288using QuantizationParams = std::pair<float, int32_t>;
289
290void PopulateTensorWithData(TContainer& tensorData,
291 unsigned int numElements,
292 const std::string& dataTypeStr,
293 const armnn::Optional<QuantizationParams>& qParams,
294 const armnn::Optional<std::string>& dataFile)
295{
296 const bool readFromFile = dataFile.has_value() && !dataFile.value().empty();
297 const bool quantizeData = qParams.has_value();
298
299 std::ifstream inputTensorFile;
300 if (readFromFile)
301 {
302 inputTensorFile = std::ifstream(dataFile.value());
303 }
304
305 if (dataTypeStr.compare("float") == 0)
306 {
307 if (quantizeData)
308 {
309 const float qScale = qParams.value().first;
310 const int qOffset = qParams.value().second;
311
312 tensorData = readFromFile ?
Derek Lambertif90c56d2020-01-10 17:14:08 +0000313 ParseDataArray<armnn::DataType::QAsymmU8>(inputTensorFile, qScale, qOffset) :
314 GenerateDummyTensorData<armnn::DataType::QAsymmU8>(numElements);
Aron Virginas-Tarc82c8732019-10-24 17:07:43 +0100315 }
316 else
317 {
318 tensorData = readFromFile ?
319 ParseDataArray<armnn::DataType::Float32>(inputTensorFile) :
320 GenerateDummyTensorData<armnn::DataType::Float32>(numElements);
321 }
322 }
323 else if (dataTypeStr.compare("int") == 0)
324 {
325 tensorData = readFromFile ?
326 ParseDataArray<armnn::DataType::Signed32>(inputTensorFile) :
327 GenerateDummyTensorData<armnn::DataType::Signed32>(numElements);
328 }
329 else if (dataTypeStr.compare("qasymm8") == 0)
330 {
331 tensorData = readFromFile ?
Derek Lambertif90c56d2020-01-10 17:14:08 +0000332 ParseDataArray<armnn::DataType::QAsymmU8>(inputTensorFile) :
333 GenerateDummyTensorData<armnn::DataType::QAsymmU8>(numElements);
Aron Virginas-Tarc82c8732019-10-24 17:07:43 +0100334 }
335 else
336 {
337 std::string errorMessage = "Unsupported tensor data type " + dataTypeStr;
Derek Lamberti08446972019-11-26 16:38:31 +0000338 ARMNN_LOG(fatal) << errorMessage;
Aron Virginas-Tarc82c8732019-10-24 17:07:43 +0100339
340 inputTensorFile.close();
341 throw armnn::Exception(errorMessage);
342 }
343
344 inputTensorFile.close();
345}
346
347} // anonymous namespace
348
// Global default for whether dummy input data should be generated.
// NOTE(review): appears unused within this file (RunTest derives
// m_GenerateTensorData from the input file list instead) — confirm callers.
bool generateTensorData = true;
350
// Aggregated, already-validated settings for one ExecuteNetwork run.
// Populated by RunTest() and consumed by MainImpl().
struct ExecuteNetworkParams
{
    using TensorShapePtr = std::unique_ptr<armnn::TensorShape>;

    const char* m_ModelPath;                          // Path of the model file to load.
    bool m_IsModelBinary;                             // Binary (true) vs. text (false) model encoding.
    std::vector<armnn::BackendId> m_ComputeDevices;   // Backend ids to run the network on.
    std::string m_DynamicBackendsPath;                // Search path for dynamically loaded backends.
    std::vector<string> m_InputNames;                 // Input binding names.
    std::vector<TensorShapePtr> m_InputTensorShapes;  // Optional per-input shape overrides.
    std::vector<string> m_InputTensorDataFilePaths;   // Per-input data files (empty => dummy data).
    std::vector<string> m_InputTypes;                 // Per-input types: "float", "int" or "qasymm8".
    bool m_QuantizeInput;                             // Quantize float input data to qasymm8 on load.
    std::vector<string> m_OutputTypes;                // Per-output types: "float", "int" or "qasymm8".
    std::vector<string> m_OutputNames;                // Output binding names.
    std::vector<string> m_OutputTensorFiles;          // Optional per-output result files.
    bool m_DequantizeOutput;                          // Dequantize qasymm8 outputs before printing.
    bool m_EnableProfiling;                           // Enable profiling in the inference model.
    bool m_EnableFp16TurboMode;                       // Allow the FP32->FP16 optimization.
    bool m_EnableBf16TurboMode;                       // Allow the FP32->BF16 optimization.
    double m_ThresholdTime;                           // Inference time budget in ms (0.0 = unset).
    bool m_PrintIntermediate;                         // Print intermediate layer outputs.
    size_t m_SubgraphId;                              // Subgraph to execute (parser dependent).
    bool m_EnableLayerDetails = false;                // Visualize the post-optimization model.
    bool m_GenerateTensorData;                        // True when no input data files were given.
    bool m_ParseUnsupported = false;                  // Tolerate unsupported layers while parsing.
};
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100378
/// Core execution path: builds an InferenceModel from 'params' using the
/// parser type TParser, loads or generates input data, runs one inference,
/// prints (and optionally files) the outputs, and reports timing.
///
/// Returns EXIT_SUCCESS, or EXIT_FAILURE on an armnn::Exception or an
/// unsupported output type. 'runtime' may share an existing IRuntime
/// (e.g. for CSV/concurrent runs); when null the model creates its own.
template<typename TParser, typename TDataType>
int MainImpl(const ExecuteNetworkParams& params,
             const std::shared_ptr<armnn::IRuntime>& runtime = nullptr)
{
    using TContainer = boost::variant<std::vector<float>, std::vector<int>, std::vector<unsigned char>>;

    std::vector<TContainer> inputDataContainers;

    try
    {
        // Creates an InferenceModel, which will parse the model and load it into an IRuntime.
        typename InferenceModel<TParser, TDataType>::Params inferenceModelParams;
        inferenceModelParams.m_ModelPath = params.m_ModelPath;
        inferenceModelParams.m_IsModelBinary = params.m_IsModelBinary;
        inferenceModelParams.m_ComputeDevices = params.m_ComputeDevices;
        inferenceModelParams.m_DynamicBackendsPath = params.m_DynamicBackendsPath;
        inferenceModelParams.m_PrintIntermediateLayers = params.m_PrintIntermediate;
        inferenceModelParams.m_VisualizePostOptimizationModel = params.m_EnableLayerDetails;
        inferenceModelParams.m_ParseUnsupported = params.m_ParseUnsupported;

        for(const std::string& inputName: params.m_InputNames)
        {
            inferenceModelParams.m_InputBindings.push_back(inputName);
        }

        for(unsigned int i = 0; i < params.m_InputTensorShapes.size(); ++i)
        {
            inferenceModelParams.m_InputShapes.push_back(*params.m_InputTensorShapes[i]);
        }

        for(const std::string& outputName: params.m_OutputNames)
        {
            inferenceModelParams.m_OutputBindings.push_back(outputName);
        }

        inferenceModelParams.m_SubgraphId = params.m_SubgraphId;
        inferenceModelParams.m_EnableFp16TurboMode = params.m_EnableFp16TurboMode;
        inferenceModelParams.m_EnableBf16TurboMode = params.m_EnableBf16TurboMode;

        InferenceModel<TParser, TDataType> model(inferenceModelParams,
                                                 params.m_EnableProfiling,
                                                 params.m_DynamicBackendsPath,
                                                 runtime);

        // Build one data container per input, either read from file or
        // filled with dummy zeros (optionally quantized).
        const size_t numInputs = inferenceModelParams.m_InputBindings.size();
        for(unsigned int i = 0; i < numInputs; ++i)
        {
            armnn::Optional<QuantizationParams> qParams = params.m_QuantizeInput ?
                armnn::MakeOptional<QuantizationParams>(model.GetInputQuantizationParams()) :
                armnn::EmptyOptional();

            armnn::Optional<std::string> dataFile = params.m_GenerateTensorData ?
                armnn::EmptyOptional() :
                armnn::MakeOptional<std::string>(params.m_InputTensorDataFilePaths[i]);

            unsigned int numElements = model.GetInputSize(i);
            if (params.m_InputTensorShapes.size() > i && params.m_InputTensorShapes[i])
            {
                // If the user has provided a tensor shape for the current input,
                // override numElements
                numElements = params.m_InputTensorShapes[i]->GetNumElements();
            }

            TContainer tensorData;
            PopulateTensorWithData(tensorData,
                                   numElements,
                                   params.m_InputTypes[i],
                                   qParams,
                                   dataFile);

            inputDataContainers.push_back(tensorData);
        }

        // Pre-size one output container per output binding, typed per the
        // command-line output-type option.
        const size_t numOutputs = inferenceModelParams.m_OutputBindings.size();
        std::vector<TContainer> outputDataContainers;

        for (unsigned int i = 0; i < numOutputs; ++i)
        {
            if (params.m_OutputTypes[i].compare("float") == 0)
            {
                outputDataContainers.push_back(std::vector<float>(model.GetOutputSize(i)));
            }
            else if (params.m_OutputTypes[i].compare("int") == 0)
            {
                outputDataContainers.push_back(std::vector<int>(model.GetOutputSize(i)));
            }
            else if (params.m_OutputTypes[i].compare("qasymm8") == 0)
            {
                outputDataContainers.push_back(std::vector<uint8_t>(model.GetOutputSize(i)));
            }
            else
            {
                ARMNN_LOG(fatal) << "Unsupported tensor data type \"" << params.m_OutputTypes[i] << "\". ";
                return EXIT_FAILURE;
            }
        }

        // model.Run returns the inference time elapsed in EnqueueWorkload (in milliseconds)
        auto inference_duration = model.Run(inputDataContainers, outputDataContainers);

        if (params.m_GenerateTensorData)
        {
            ARMNN_LOG(warning) << "The input data was generated, note that the output will not be useful";
        }

        // Print output tensors
        const auto& infosOut = model.GetOutputBindingInfos();
        for (size_t i = 0; i < numOutputs; i++)
        {
            const armnn::TensorInfo& infoOut = infosOut[i].second;
            // An empty file list means no outputs are written to disk.
            auto outputTensorFile = params.m_OutputTensorFiles.empty() ? "" : params.m_OutputTensorFiles[i];

            TensorPrinter printer(inferenceModelParams.m_OutputBindings[i],
                                  infoOut,
                                  outputTensorFile,
                                  params.m_DequantizeOutput);
            boost::apply_visitor(printer, outputDataContainers[i]);
        }

        ARMNN_LOG(info) << "\nInference time: " << std::setprecision(2)
                        << std::fixed << inference_duration.count() << " ms";

        // If thresholdTime == 0.0 (default), then it hasn't been supplied at command line
        if (params.m_ThresholdTime != 0.0)
        {
            ARMNN_LOG(info) << "Threshold time: " << std::setprecision(2)
                            << std::fixed << params.m_ThresholdTime << " ms";
            auto thresholdMinusInference = params.m_ThresholdTime - inference_duration.count();
            ARMNN_LOG(info) << "Threshold time - Inference time: " << std::setprecision(2)
                            << std::fixed << thresholdMinusInference << " ms" << "\n";

            if (thresholdMinusInference < 0)
            {
                // Over budget is reported as fatal but does not change the
                // return code here.
                std::string errorMessage = "Elapsed inference time is greater than provided threshold time.";
                ARMNN_LOG(fatal) << errorMessage;
            }
        }
    }
    catch (armnn::Exception const& e)
    {
        ARMNN_LOG(fatal) << "Armnn Error: " << e.what();
        return EXIT_FAILURE;
    }

    return EXIT_SUCCESS;
}
525
526// This will run a test
527int RunTest(const std::string& format,
528 const std::string& inputTensorShapesStr,
Aron Virginas-Tarc82c8732019-10-24 17:07:43 +0100529 const vector<armnn::BackendId>& computeDevices,
Matteo Martincigh00dda4a2019-08-14 11:42:30 +0100530 const std::string& dynamicBackendsPath,
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100531 const std::string& path,
532 const std::string& inputNames,
533 const std::string& inputTensorDataFilePaths,
534 const std::string& inputTypes,
Narumol Prangnawarat610256f2019-06-26 15:10:46 +0100535 bool quantizeInput,
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100536 const std::string& outputTypes,
537 const std::string& outputNames,
Sadik Armagan77086282019-09-02 11:46:28 +0100538 const std::string& outputTensorFiles,
Georgios Pinitas50311ba2020-02-18 13:25:23 +0000539 bool dequantizeOuput,
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100540 bool enableProfiling,
541 bool enableFp16TurboMode,
Narumol Prangnawaratd8cc8112020-03-24 13:54:05 +0000542 bool enableBf16TurboMode,
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100543 const double& thresholdTime,
Matthew Jackson54658b92019-08-27 15:35:59 +0100544 bool printIntermediate,
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100545 const size_t subgraphId,
Andre Ghattas23ae2ea2019-08-07 12:18:38 +0100546 bool enableLayerDetails = false,
Derek Lamberti132563c2019-12-02 16:06:40 +0000547 bool parseUnsupported = false,
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100548 const std::shared_ptr<armnn::IRuntime>& runtime = nullptr)
549{
David Monahana8837bf2020-04-16 10:01:56 +0100550 std::string modelFormat = armnn::stringUtils::StringTrimCopy(format);
551 std::string modelPath = armnn::stringUtils::StringTrimCopy(path);
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100552 std::vector<std::string> inputNamesVector = ParseStringList(inputNames, ",");
Francis Murtagh1555cbd2019-10-08 14:47:46 +0100553 std::vector<std::string> inputTensorShapesVector = ParseStringList(inputTensorShapesStr, ":");
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100554 std::vector<std::string> inputTensorDataFilePathsVector = ParseStringList(
555 inputTensorDataFilePaths, ",");
556 std::vector<std::string> outputNamesVector = ParseStringList(outputNames, ",");
557 std::vector<std::string> inputTypesVector = ParseStringList(inputTypes, ",");
558 std::vector<std::string> outputTypesVector = ParseStringList(outputTypes, ",");
Sadik Armagan77086282019-09-02 11:46:28 +0100559 std::vector<std::string> outputTensorFilesVector = ParseStringList(outputTensorFiles, ",");
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100560
561 // Parse model binary flag from the model-format string we got from the command-line
562 bool isModelBinary;
563 if (modelFormat.find("bin") != std::string::npos)
564 {
565 isModelBinary = true;
566 }
567 else if (modelFormat.find("txt") != std::string::npos || modelFormat.find("text") != std::string::npos)
568 {
569 isModelBinary = false;
570 }
571 else
572 {
Derek Lamberti08446972019-11-26 16:38:31 +0000573 ARMNN_LOG(fatal) << "Unknown model format: '" << modelFormat << "'. Please include 'binary' or 'text'";
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100574 return EXIT_FAILURE;
575 }
576
577 if ((inputTensorShapesVector.size() != 0) && (inputTensorShapesVector.size() != inputNamesVector.size()))
578 {
Derek Lamberti08446972019-11-26 16:38:31 +0000579 ARMNN_LOG(fatal) << "input-name and input-tensor-shape must have the same amount of elements.";
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100580 return EXIT_FAILURE;
581 }
582
583 if ((inputTensorDataFilePathsVector.size() != 0) &&
584 (inputTensorDataFilePathsVector.size() != inputNamesVector.size()))
585 {
Derek Lamberti08446972019-11-26 16:38:31 +0000586 ARMNN_LOG(fatal) << "input-name and input-tensor-data must have the same amount of elements.";
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100587 return EXIT_FAILURE;
588 }
589
Sadik Armagan77086282019-09-02 11:46:28 +0100590 if ((outputTensorFilesVector.size() != 0) &&
591 (outputTensorFilesVector.size() != outputNamesVector.size()))
592 {
Derek Lamberti08446972019-11-26 16:38:31 +0000593 ARMNN_LOG(fatal) << "output-name and write-outputs-to-file must have the same amount of elements.";
Sadik Armagan77086282019-09-02 11:46:28 +0100594 return EXIT_FAILURE;
595 }
596
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100597 if (inputTypesVector.size() == 0)
598 {
599 //Defaults the value of all inputs to "float"
600 inputTypesVector.assign(inputNamesVector.size(), "float");
601 }
Matteo Martincigh08b51862019-08-29 16:26:10 +0100602 else if ((inputTypesVector.size() != 0) && (inputTypesVector.size() != inputNamesVector.size()))
603 {
Derek Lamberti08446972019-11-26 16:38:31 +0000604 ARMNN_LOG(fatal) << "input-name and input-type must have the same amount of elements.";
Matteo Martincigh08b51862019-08-29 16:26:10 +0100605 return EXIT_FAILURE;
606 }
607
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100608 if (outputTypesVector.size() == 0)
609 {
610 //Defaults the value of all outputs to "float"
611 outputTypesVector.assign(outputNamesVector.size(), "float");
612 }
Matteo Martincigh08b51862019-08-29 16:26:10 +0100613 else if ((outputTypesVector.size() != 0) && (outputTypesVector.size() != outputNamesVector.size()))
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100614 {
Derek Lamberti08446972019-11-26 16:38:31 +0000615 ARMNN_LOG(fatal) << "output-name and output-type must have the same amount of elements.";
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100616 return EXIT_FAILURE;
617 }
618
619 // Parse input tensor shape from the string we got from the command-line.
620 std::vector<std::unique_ptr<armnn::TensorShape>> inputTensorShapes;
621
622 if (!inputTensorShapesVector.empty())
623 {
624 inputTensorShapes.reserve(inputTensorShapesVector.size());
625
626 for(const std::string& shape : inputTensorShapesVector)
627 {
628 std::stringstream ss(shape);
629 std::vector<unsigned int> dims = ParseArray(ss);
630
631 try
632 {
633 // Coverity fix: An exception of type armnn::InvalidArgumentException is thrown and never caught.
634 inputTensorShapes.push_back(std::make_unique<armnn::TensorShape>(dims.size(), dims.data()));
635 }
636 catch (const armnn::InvalidArgumentException& e)
637 {
Derek Lamberti08446972019-11-26 16:38:31 +0000638 ARMNN_LOG(fatal) << "Cannot create tensor shape: " << e.what();
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100639 return EXIT_FAILURE;
640 }
641 }
642 }
643
644 // Check that threshold time is not less than zero
645 if (thresholdTime < 0)
646 {
Derek Lamberti08446972019-11-26 16:38:31 +0000647 ARMNN_LOG(fatal) << "Threshold time supplied as a command line argument is less than zero.";
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100648 return EXIT_FAILURE;
649 }
650
Aron Virginas-Tarc82c8732019-10-24 17:07:43 +0100651 ExecuteNetworkParams params;
652 params.m_ModelPath = modelPath.c_str();
653 params.m_IsModelBinary = isModelBinary;
654 params.m_ComputeDevices = computeDevices;
655 params.m_DynamicBackendsPath = dynamicBackendsPath;
656 params.m_InputNames = inputNamesVector;
657 params.m_InputTensorShapes = std::move(inputTensorShapes);
658 params.m_InputTensorDataFilePaths = inputTensorDataFilePathsVector;
659 params.m_InputTypes = inputTypesVector;
660 params.m_QuantizeInput = quantizeInput;
661 params.m_OutputTypes = outputTypesVector;
662 params.m_OutputNames = outputNamesVector;
663 params.m_OutputTensorFiles = outputTensorFilesVector;
Georgios Pinitas50311ba2020-02-18 13:25:23 +0000664 params.m_DequantizeOutput = dequantizeOuput;
Aron Virginas-Tarc82c8732019-10-24 17:07:43 +0100665 params.m_EnableProfiling = enableProfiling;
666 params.m_EnableFp16TurboMode = enableFp16TurboMode;
Narumol Prangnawaratd8cc8112020-03-24 13:54:05 +0000667 params.m_EnableBf16TurboMode = enableBf16TurboMode;
Aron Virginas-Tarc82c8732019-10-24 17:07:43 +0100668 params.m_ThresholdTime = thresholdTime;
669 params.m_PrintIntermediate = printIntermediate;
670 params.m_SubgraphId = subgraphId;
671 params.m_EnableLayerDetails = enableLayerDetails;
672 params.m_GenerateTensorData = inputTensorDataFilePathsVector.empty();
Derek Lamberti132563c2019-12-02 16:06:40 +0000673 params.m_ParseUnsupported = parseUnsupported;
Aron Virginas-Tarc82c8732019-10-24 17:07:43 +0100674
675 // Warn if ExecuteNetwork will generate dummy input data
676 if (params.m_GenerateTensorData)
677 {
Derek Lamberti08446972019-11-26 16:38:31 +0000678 ARMNN_LOG(warning) << "No input files provided, input tensors will be filled with 0s.";
Aron Virginas-Tarc82c8732019-10-24 17:07:43 +0100679 }
680
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100681 // Forward to implementation based on the parser type
682 if (modelFormat.find("armnn") != std::string::npos)
683 {
684#if defined(ARMNN_SERIALIZER)
Aron Virginas-Tarc82c8732019-10-24 17:07:43 +0100685 return MainImpl<armnnDeserializer::IDeserializer, float>(params, runtime);
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100686#else
Derek Lamberti08446972019-11-26 16:38:31 +0000687 ARMNN_LOG(fatal) << "Not built with serialization support.";
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100688 return EXIT_FAILURE;
689#endif
690 }
691 else if (modelFormat.find("caffe") != std::string::npos)
692 {
693#if defined(ARMNN_CAFFE_PARSER)
Aron Virginas-Tarc82c8732019-10-24 17:07:43 +0100694 return MainImpl<armnnCaffeParser::ICaffeParser, float>(params, runtime);
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100695#else
Derek Lamberti08446972019-11-26 16:38:31 +0000696 ARMNN_LOG(fatal) << "Not built with Caffe parser support.";
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100697 return EXIT_FAILURE;
698#endif
699 }
700 else if (modelFormat.find("onnx") != std::string::npos)
701{
702#if defined(ARMNN_ONNX_PARSER)
Aron Virginas-Tarc82c8732019-10-24 17:07:43 +0100703 return MainImpl<armnnOnnxParser::IOnnxParser, float>(params, runtime);
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100704#else
Derek Lamberti08446972019-11-26 16:38:31 +0000705 ARMNN_LOG(fatal) << "Not built with Onnx parser support.";
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100706 return EXIT_FAILURE;
707#endif
708 }
709 else if (modelFormat.find("tensorflow") != std::string::npos)
710 {
711#if defined(ARMNN_TF_PARSER)
Aron Virginas-Tarc82c8732019-10-24 17:07:43 +0100712 return MainImpl<armnnTfParser::ITfParser, float>(params, runtime);
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100713#else
Derek Lamberti08446972019-11-26 16:38:31 +0000714 ARMNN_LOG(fatal) << "Not built with Tensorflow parser support.";
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100715 return EXIT_FAILURE;
716#endif
717 }
718 else if(modelFormat.find("tflite") != std::string::npos)
719 {
720#if defined(ARMNN_TF_LITE_PARSER)
721 if (! isModelBinary)
722 {
Derek Lamberti08446972019-11-26 16:38:31 +0000723 ARMNN_LOG(fatal) << "Unknown model format: '" << modelFormat << "'. Only 'binary' format supported \
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100724 for tflite files";
725 return EXIT_FAILURE;
726 }
Aron Virginas-Tarc82c8732019-10-24 17:07:43 +0100727 return MainImpl<armnnTfLiteParser::ITfLiteParser, float>(params, runtime);
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100728#else
Derek Lamberti08446972019-11-26 16:38:31 +0000729 ARMNN_LOG(fatal) << "Unknown model format: '" << modelFormat <<
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100730 "'. Please include 'caffe', 'tensorflow', 'tflite' or 'onnx'";
731 return EXIT_FAILURE;
732#endif
733 }
734 else
735 {
Derek Lamberti08446972019-11-26 16:38:31 +0000736 ARMNN_LOG(fatal) << "Unknown model format: '" << modelFormat <<
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100737 "'. Please include 'caffe', 'tensorflow', 'tflite' or 'onnx'";
738 return EXIT_FAILURE;
739 }
740}
741
742int RunCsvTest(const armnnUtils::CsvRow &csvRow, const std::shared_ptr<armnn::IRuntime>& runtime,
Narumol Prangnawaratd8cc8112020-03-24 13:54:05 +0000743 const bool enableProfiling, const bool enableFp16TurboMode, const bool enableBf16TurboMode,
744 const double& thresholdTime, const bool printIntermediate, bool enableLayerDetails = false,
745 bool parseUnuspported = false)
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100746{
Jan Eilers8eb25602020-03-09 12:13:48 +0000747 IgnoreUnused(runtime);
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100748 std::string modelFormat;
749 std::string modelPath;
750 std::string inputNames;
751 std::string inputTensorShapes;
752 std::string inputTensorDataFilePaths;
753 std::string outputNames;
754 std::string inputTypes;
755 std::string outputTypes;
Matteo Martincigh00dda4a2019-08-14 11:42:30 +0100756 std::string dynamicBackendsPath;
Sadik Armagan77086282019-09-02 11:46:28 +0100757 std::string outputTensorFiles;
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100758
759 size_t subgraphId = 0;
760
761 const std::string backendsMessage = std::string("The preferred order of devices to run layers on by default. ")
762 + std::string("Possible choices: ")
763 + armnn::BackendRegistryInstance().GetBackendIdsAsString();
764
765 po::options_description desc("Options");
766 try
767 {
768 desc.add_options()
769 ("model-format,f", po::value(&modelFormat),
770 "armnn-binary, caffe-binary, caffe-text, tflite-binary, onnx-binary, onnx-text, tensorflow-binary or "
771 "tensorflow-text.")
772 ("model-path,m", po::value(&modelPath), "Path to model file, e.g. .armnn, .caffemodel, .prototxt, "
773 ".tflite, .onnx")
774 ("compute,c", po::value<std::vector<armnn::BackendId>>()->multitoken(),
775 backendsMessage.c_str())
Matteo Martincigh00dda4a2019-08-14 11:42:30 +0100776 ("dynamic-backends-path,b", po::value(&dynamicBackendsPath),
777 "Path where to load any available dynamic backend from. "
778 "If left empty (the default), dynamic backends will not be used.")
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100779 ("input-name,i", po::value(&inputNames), "Identifier of the input tensors in the network separated by comma.")
780 ("subgraph-number,n", po::value<size_t>(&subgraphId)->default_value(0), "Id of the subgraph to be "
781 "executed. Defaults to 0.")
782 ("input-tensor-shape,s", po::value(&inputTensorShapes),
783 "The shape of the input tensors in the network as a flat array of integers separated by comma. "
784 "Several shapes can be passed separating them by semicolon. "
785 "This parameter is optional, depending on the network.")
Aron Virginas-Tarc82c8732019-10-24 17:07:43 +0100786 ("input-tensor-data,d", po::value(&inputTensorDataFilePaths)->default_value(""),
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100787 "Path to files containing the input data as a flat array separated by whitespace. "
Aron Virginas-Tarc82c8732019-10-24 17:07:43 +0100788 "Several paths can be passed separating them by comma. If not specified, the network will be run with dummy "
789 "data (useful for profiling).")
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100790 ("input-type,y",po::value(&inputTypes), "The type of the input tensors in the network separated by comma. "
791 "If unset, defaults to \"float\" for all defined inputs. "
792 "Accepted values (float, int or qasymm8).")
Narumol Prangnawarat610256f2019-06-26 15:10:46 +0100793 ("quantize-input,q",po::bool_switch()->default_value(false),
794 "If this option is enabled, all float inputs will be quantized to qasymm8. "
795 "If unset, default to not quantized. "
796 "Accepted values (true or false)")
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100797 ("output-type,z",po::value(&outputTypes), "The type of the output tensors in the network separated by comma. "
798 "If unset, defaults to \"float\" for all defined outputs. "
799 "Accepted values (float, int or qasymm8).")
800 ("output-name,o", po::value(&outputNames),
Sadik Armagan77086282019-09-02 11:46:28 +0100801 "Identifier of the output tensors in the network separated by comma.")
Georgios Pinitas50311ba2020-02-18 13:25:23 +0000802 ("dequantize-output,l",po::bool_switch()->default_value(false),
803 "If this option is enabled, all quantized outputs will be dequantized to float. "
804 "If unset, default to not get dequantized. "
805 "Accepted values (true or false)")
Sadik Armagan77086282019-09-02 11:46:28 +0100806 ("write-outputs-to-file,w", po::value(&outputTensorFiles),
807 "Comma-separated list of output file paths keyed with the binding-id of the output slot. "
808 "If left empty (the default), the output tensors will not be written to a file.");
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100809 }
810 catch (const std::exception& e)
811 {
812 // Coverity points out that default_value(...) can throw a bad_lexical_cast,
813 // and that desc.add_options() can throw boost::io::too_few_args.
814 // They really won't in any of these cases.
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +0100815 ARMNN_ASSERT_MSG(false, "Caught unexpected exception");
Derek Lamberti08446972019-11-26 16:38:31 +0000816 ARMNN_LOG(fatal) << "Fatal internal error: " << e.what();
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100817 return EXIT_FAILURE;
818 }
819
820 std::vector<const char*> clOptions;
821 clOptions.reserve(csvRow.values.size());
822 for (const std::string& value : csvRow.values)
823 {
824 clOptions.push_back(value.c_str());
825 }
826
827 po::variables_map vm;
828 try
829 {
830 po::store(po::parse_command_line(static_cast<int>(clOptions.size()), clOptions.data(), desc), vm);
831
832 po::notify(vm);
833
834 CheckOptionDependencies(vm);
835 }
836 catch (const po::error& e)
837 {
838 std::cerr << e.what() << std::endl << std::endl;
839 std::cerr << desc << std::endl;
840 return EXIT_FAILURE;
841 }
842
Narumol Prangnawarat610256f2019-06-26 15:10:46 +0100843 // Get the value of the switch arguments.
844 bool quantizeInput = vm["quantize-input"].as<bool>();
Georgios Pinitas50311ba2020-02-18 13:25:23 +0000845 bool dequantizeOutput = vm["dequantize-output"].as<bool>();
Narumol Prangnawarat610256f2019-06-26 15:10:46 +0100846
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100847 // Get the preferred order of compute devices.
848 std::vector<armnn::BackendId> computeDevices = vm["compute"].as<std::vector<armnn::BackendId>>();
849
850 // Remove duplicates from the list of compute devices.
851 RemoveDuplicateDevices(computeDevices);
852
853 // Check that the specified compute devices are valid.
854 std::string invalidBackends;
855 if (!CheckRequestedBackendsAreValid(computeDevices, armnn::Optional<std::string&>(invalidBackends)))
856 {
Derek Lamberti08446972019-11-26 16:38:31 +0000857 ARMNN_LOG(fatal) << "The list of preferred devices contains invalid backend IDs: "
Francis Murtaghbee4bc92019-06-18 12:30:37 +0100858 << invalidBackends;
859 return EXIT_FAILURE;
860 }
861
Matteo Martincigh00dda4a2019-08-14 11:42:30 +0100862 return RunTest(modelFormat, inputTensorShapes, computeDevices, dynamicBackendsPath, modelPath, inputNames,
Sadik Armagan77086282019-09-02 11:46:28 +0100863 inputTensorDataFilePaths, inputTypes, quantizeInput, outputTypes, outputNames, outputTensorFiles,
Narumol Prangnawaratd8cc8112020-03-24 13:54:05 +0000864 dequantizeOutput, enableProfiling, enableFp16TurboMode, enableBf16TurboMode,
865 thresholdTime, printIntermediate, subgraphId, enableLayerDetails, parseUnuspported);
Matteo Martincigh00dda4a2019-08-14 11:42:30 +0100866}