blob: bbab70b39a5c97d1ec5fe1587ba152c0e5717091 [file] [log] [blame]
telsoa014fcda012018-03-09 14:13:49 +00001//
2// Copyright © 2017 Arm Ltd. All rights reserved.
David Beckecb56cd2018-09-05 12:52:57 +01003// SPDX-License-Identifier: MIT
telsoa014fcda012018-03-09 14:13:49 +00004//
Aron Virginas-Tar5cc8e562018-10-23 15:14:46 +01005#include <armnn/ArmNN.hpp>
telsoa01c577f2c2018-08-31 09:22:23 +01006#include <armnn/TypesUtils.hpp>
7
telsoa014fcda012018-03-09 14:13:49 +00008#if defined(ARMNN_CAFFE_PARSER)
9#include "armnnCaffeParser/ICaffeParser.hpp"
10#endif
surmeh01bceff2f2018-03-29 16:29:27 +010011#if defined(ARMNN_TF_PARSER)
12#include "armnnTfParser/ITfParser.hpp"
13#endif
telsoa01c577f2c2018-08-31 09:22:23 +010014#if defined(ARMNN_TF_LITE_PARSER)
15#include "armnnTfLiteParser/ITfLiteParser.hpp"
16#endif
17#if defined(ARMNN_ONNX_PARSER)
18#include "armnnOnnxParser/IOnnxParser.hpp"
19#endif
20#include "CsvReader.hpp"
telsoa014fcda012018-03-09 14:13:49 +000021#include "../InferenceTest.hpp"
22
telsoa01c577f2c2018-08-31 09:22:23 +010023#include <Logging.hpp>
24#include <Profiling.hpp>
25
26#include <boost/algorithm/string/trim.hpp>
telsoa014fcda012018-03-09 14:13:49 +000027#include <boost/algorithm/string/split.hpp>
28#include <boost/algorithm/string/classification.hpp>
telsoa01c577f2c2018-08-31 09:22:23 +010029#include <boost/program_options.hpp>
Ferran Balaguerc602f292019-02-08 17:09:55 +000030#include <boost/variant.hpp>
telsoa014fcda012018-03-09 14:13:49 +000031
32#include <iostream>
33#include <fstream>
telsoa01c577f2c2018-08-31 09:22:23 +010034#include <functional>
35#include <future>
36#include <algorithm>
37#include <iterator>
telsoa014fcda012018-03-09 14:13:49 +000038
39namespace
40{
41
telsoa01c577f2c2018-08-31 09:22:23 +010042// Configure boost::program_options for command-line parsing and validation.
43namespace po = boost::program_options;
44
telsoa014fcda012018-03-09 14:13:49 +000045template<typename T, typename TParseElementFunc>
Ferran Balaguerc602f292019-02-08 17:09:55 +000046std::vector<T> ParseArrayImpl(std::istream& stream, TParseElementFunc parseElementFunc, const char * chars = "\t ,:")
telsoa014fcda012018-03-09 14:13:49 +000047{
48 std::vector<T> result;
telsoa01c577f2c2018-08-31 09:22:23 +010049 // Processes line-by-line.
telsoa014fcda012018-03-09 14:13:49 +000050 std::string line;
51 while (std::getline(stream, line))
52 {
53 std::vector<std::string> tokens;
surmeh013537c2c2018-05-18 16:31:43 +010054 try
55 {
56 // Coverity fix: boost::split() may throw an exception of type boost::bad_function_call.
Ferran Balaguerc602f292019-02-08 17:09:55 +000057 boost::split(tokens, line, boost::algorithm::is_any_of(chars), boost::token_compress_on);
surmeh013537c2c2018-05-18 16:31:43 +010058 }
59 catch (const std::exception& e)
60 {
61 BOOST_LOG_TRIVIAL(error) << "An error occurred when splitting tokens: " << e.what();
62 continue;
63 }
telsoa014fcda012018-03-09 14:13:49 +000064 for (const std::string& token : tokens)
65 {
66 if (!token.empty()) // See https://stackoverflow.com/questions/10437406/
67 {
68 try
69 {
70 result.push_back(parseElementFunc(token));
71 }
72 catch (const std::exception&)
73 {
74 BOOST_LOG_TRIVIAL(error) << "'" << token << "' is not a valid number. It has been ignored.";
75 }
76 }
77 }
78 }
79
80 return result;
81}
82
telsoa01c577f2c2018-08-31 09:22:23 +010083bool CheckOption(const po::variables_map& vm,
84 const char* option)
85{
86 // Check that the given option is valid.
87 if (option == nullptr)
88 {
89 return false;
90 }
91
92 // Check whether 'option' is provided.
93 return vm.find(option) != vm.end();
94}
95
96void CheckOptionDependency(const po::variables_map& vm,
97 const char* option,
98 const char* required)
99{
100 // Check that the given options are valid.
101 if (option == nullptr || required == nullptr)
102 {
103 throw po::error("Invalid option to check dependency for");
104 }
105
106 // Check that if 'option' is provided, 'required' is also provided.
107 if (CheckOption(vm, option) && !vm[option].defaulted())
108 {
109 if (CheckOption(vm, required) == 0 || vm[required].defaulted())
110 {
111 throw po::error(std::string("Option '") + option + "' requires option '" + required + "'.");
112 }
113 }
114}
115
// Validates the inter-option constraints for a parsed command line:
// a model path must always come with the options needed to run it, and an
// input shape may only be given when a model path is present.
// Throws po::error (via CheckOptionDependency) on the first violation.
void CheckOptionDependencies(const po::variables_map& vm)
{
    CheckOptionDependency(vm, "model-path", "model-format");
    CheckOptionDependency(vm, "model-path", "input-name");
    CheckOptionDependency(vm, "model-path", "input-tensor-data");
    CheckOptionDependency(vm, "model-path", "output-name");
    CheckOptionDependency(vm, "input-tensor-shape", "model-path");
}
124
125template<typename T>
126std::vector<T> ParseArray(std::istream& stream);
127
128template<>
129std::vector<float> ParseArray(std::istream& stream)
130{
131 return ParseArrayImpl<float>(stream, [](const std::string& s) { return std::stof(s); });
132}
133
134template<>
135std::vector<unsigned int> ParseArray(std::istream& stream)
136{
137 return ParseArrayImpl<unsigned int>(stream,
138 [](const std::string& s) { return boost::numeric_cast<unsigned int>(std::stoi(s)); });
139}
140
Ferran Balaguerc602f292019-02-08 17:09:55 +0000141template<>
142std::vector<int> ParseArray(std::istream& stream)
telsoa014fcda012018-03-09 14:13:49 +0000143{
Ferran Balaguerc602f292019-02-08 17:09:55 +0000144 return ParseArrayImpl<int>(stream, [](const std::string& s) { return std::stoi(s); });
145}
146
147std::vector<std::string> ParseInputString(const std::string& inputString, const char * chars)
148{
149 std::stringstream stream(inputString);
150
151 return ParseArrayImpl<std::string>(stream, [](const std::string& s) { return boost::trim_copy(s); }, chars);
telsoa014fcda012018-03-09 14:13:49 +0000152}
153
David Beckf0b48452018-10-19 15:20:56 +0100154void RemoveDuplicateDevices(std::vector<armnn::BackendId>& computeDevices)
telsoa014fcda012018-03-09 14:13:49 +0000155{
telsoa01c577f2c2018-08-31 09:22:23 +0100156 // Mark the duplicate devices as 'Undefined'.
157 for (auto i = computeDevices.begin(); i != computeDevices.end(); ++i)
158 {
159 for (auto j = std::next(i); j != computeDevices.end(); ++j)
160 {
161 if (*j == *i)
162 {
163 *j = armnn::Compute::Undefined;
164 }
165 }
166 }
167
168 // Remove 'Undefined' devices.
169 computeDevices.erase(std::remove(computeDevices.begin(), computeDevices.end(), armnn::Compute::Undefined),
170 computeDevices.end());
171}
172
telsoa01c577f2c2018-08-31 09:22:23 +0100173} // namespace
174
175template<typename TParser, typename TDataType>
176int MainImpl(const char* modelPath,
177 bool isModelBinary,
Aron Virginas-Tar339bcae2019-01-31 16:44:26 +0000178 const std::vector<armnn::BackendId>& computeDevices,
Ferran Balaguerc602f292019-02-08 17:09:55 +0000179 const std::vector<string>& inputNames,
180 const std::vector<std::unique_ptr<armnn::TensorShape>>& inputTensorShapes,
181 const std::vector<string>& inputTensorDataFilePaths,
182 const std::vector<string>& inputTypes,
183 const std::vector<string>& outputNames,
telsoa01c577f2c2018-08-31 09:22:23 +0100184 bool enableProfiling,
185 const size_t subgraphId,
186 const std::shared_ptr<armnn::IRuntime>& runtime = nullptr)
187{
Ferran Balaguerc602f292019-02-08 17:09:55 +0000188 using TContainer = boost::variant<std::vector<float>, std::vector<int>, std::vector<unsigned char>>;
Aron Virginas-Tar7cf0eaa2019-01-24 17:05:36 +0000189
Ferran Balaguerc602f292019-02-08 17:09:55 +0000190 std::vector<TContainer> inputDataContainers;
191
192 for(unsigned int i = 0; i < inputTensorDataFilePaths.size(); ++i)
telsoa014fcda012018-03-09 14:13:49 +0000193 {
Ferran Balaguerc602f292019-02-08 17:09:55 +0000194 std::ifstream inputTensorFile(inputTensorDataFilePaths[i]);
195
196 if (inputTypes[i].compare("float") == 0)
telsoa014fcda012018-03-09 14:13:49 +0000197 {
Ferran Balaguerc602f292019-02-08 17:09:55 +0000198 inputDataContainers.push_back(ParseArray<float>(inputTensorFile));
199 }
200 else if (inputTypes[i].compare("int") == 0)
201 {
202 inputDataContainers.push_back(ParseArray<int>(inputTensorFile));;
203 }
204 else
205 {
206 BOOST_LOG_TRIVIAL(fatal) << "Unsupported tensor data type \"" << inputTypes[i] << "\". ";
telsoa01c577f2c2018-08-31 09:22:23 +0100207 return EXIT_FAILURE;
telsoa014fcda012018-03-09 14:13:49 +0000208 }
Ferran Balaguerc602f292019-02-08 17:09:55 +0000209
210 inputTensorFile.close();
telsoa014fcda012018-03-09 14:13:49 +0000211 }
212
213 try
214 {
telsoa01c577f2c2018-08-31 09:22:23 +0100215 // Creates an InferenceModel, which will parse the model and load it into an IRuntime.
telsoa014fcda012018-03-09 14:13:49 +0000216 typename InferenceModel<TParser, TDataType>::Params params;
217 params.m_ModelPath = modelPath;
218 params.m_IsModelBinary = isModelBinary;
Aron Virginas-Tar339bcae2019-01-31 16:44:26 +0000219 params.m_ComputeDevices = computeDevices;
Ferran Balaguerc602f292019-02-08 17:09:55 +0000220
221 for(const std::string& inputName: inputNames)
222 {
223 params.m_InputBindings.push_back(inputName);
224 }
225
226 for(unsigned int i = 0; i < inputTensorShapes.size(); ++i)
227 {
228 params.m_InputShapes.push_back(*inputTensorShapes[i]);
229 }
230
231 for(const std::string& outputName: outputNames)
232 {
233 params.m_OutputBindings.push_back(outputName);
234 }
235
telsoa01c577f2c2018-08-31 09:22:23 +0100236 params.m_EnableProfiling = enableProfiling;
237 params.m_SubgraphId = subgraphId;
238 InferenceModel<TParser, TDataType> model(params, runtime);
telsoa014fcda012018-03-09 14:13:49 +0000239
Ferran Balaguerc602f292019-02-08 17:09:55 +0000240 const size_t numOutputs = params.m_OutputBindings.size();
241 std::vector<TContainer> outputDataContainers;
Aron Virginas-Tar9b937472019-01-30 17:41:47 +0000242
Ferran Balaguerc602f292019-02-08 17:09:55 +0000243 for (unsigned int i = 0; i < numOutputs; ++i)
244 {
245 outputDataContainers.push_back(std::vector<float>(model.GetOutputSize(i)));
246 }
Aron Virginas-Tar93f5f972019-01-31 13:12:34 +0000247
Ferran Balaguerc602f292019-02-08 17:09:55 +0000248 model.Run(inputDataContainers, outputDataContainers);
telsoa014fcda012018-03-09 14:13:49 +0000249
Aron Virginas-Tar7cf0eaa2019-01-24 17:05:36 +0000250 // Print output tensors
251 for (size_t i = 0; i < numOutputs; i++)
252 {
Ferran Balaguerc602f292019-02-08 17:09:55 +0000253 boost::apply_visitor([&](auto&& value)
254 {
255 std::cout << params.m_OutputBindings[i] << ": ";
256 for (size_t i = 0; i < value.size(); ++i)
257 {
258 printf("%f ", static_cast<float>(value[i]));
259 }
260 printf("\n");
261 },
262 outputDataContainers[i]);
Aron Virginas-Tar7cf0eaa2019-01-24 17:05:36 +0000263 }
telsoa014fcda012018-03-09 14:13:49 +0000264 }
265 catch (armnn::Exception const& e)
266 {
267 BOOST_LOG_TRIVIAL(fatal) << "Armnn Error: " << e.what();
telsoa01c577f2c2018-08-31 09:22:23 +0100268 return EXIT_FAILURE;
telsoa014fcda012018-03-09 14:13:49 +0000269 }
270
telsoa01c577f2c2018-08-31 09:22:23 +0100271 return EXIT_SUCCESS;
telsoa014fcda012018-03-09 14:13:49 +0000272}
273
telsoa01c577f2c2018-08-31 09:22:23 +0100274// This will run a test
Ferran Balaguerc602f292019-02-08 17:09:55 +0000275int RunTest(const std::string& format,
276 const std::string& inputTensorShapesStr,
David Beckf0b48452018-10-19 15:20:56 +0100277 const vector<armnn::BackendId>& computeDevice,
Ferran Balaguerc602f292019-02-08 17:09:55 +0000278 const std::string& path,
279 const std::string& inputNames,
280 const std::string& inputTensorDataFilePaths,
281 const std::string& inputTypes,
282 const std::string& outputNames,
telsoa01c577f2c2018-08-31 09:22:23 +0100283 bool enableProfiling,
284 const size_t subgraphId,
285 const std::shared_ptr<armnn::IRuntime>& runtime = nullptr)
telsoa014fcda012018-03-09 14:13:49 +0000286{
Ferran Balaguerc602f292019-02-08 17:09:55 +0000287 std::string modelFormat = boost::trim_copy(format);
288 std::string modelPath = boost::trim_copy(path);
289 std::vector<std::string> inputNamesVector = ParseInputString(inputNames, ",");
290 std::vector<std::string> inputTensorShapesVector = ParseInputString(inputTensorShapesStr, ";");
291 std::vector<std::string> inputTensorDataFilePathsVector = ParseInputString(inputTensorDataFilePaths, ",");
292 std::vector<std::string> outputNamesVector = ParseInputString(outputNames, ",");
293 std::vector<std::string> inputTypesVector = ParseInputString(inputTypes, ",");
294
telsoa014fcda012018-03-09 14:13:49 +0000295 // Parse model binary flag from the model-format string we got from the command-line
296 bool isModelBinary;
297 if (modelFormat.find("bin") != std::string::npos)
298 {
299 isModelBinary = true;
300 }
301 else if (modelFormat.find("txt") != std::string::npos || modelFormat.find("text") != std::string::npos)
302 {
303 isModelBinary = false;
304 }
305 else
306 {
307 BOOST_LOG_TRIVIAL(fatal) << "Unknown model format: '" << modelFormat << "'. Please include 'binary' or 'text'";
telsoa01c577f2c2018-08-31 09:22:23 +0100308 return EXIT_FAILURE;
telsoa014fcda012018-03-09 14:13:49 +0000309 }
310
Ferran Balaguerc602f292019-02-08 17:09:55 +0000311 if ((inputTensorShapesVector.size() != 0) && (inputTensorShapesVector.size() != inputNamesVector.size()))
telsoa014fcda012018-03-09 14:13:49 +0000312 {
Ferran Balaguerc602f292019-02-08 17:09:55 +0000313 BOOST_LOG_TRIVIAL(fatal) << "input-name and input-tensor-shape must have the same amount of elements.";
314 return EXIT_FAILURE;
315 }
surmeh013537c2c2018-05-18 16:31:43 +0100316
Ferran Balaguerc602f292019-02-08 17:09:55 +0000317 if ((inputTensorDataFilePathsVector.size() != 0) &&
318 (inputTensorDataFilePathsVector.size() != inputNamesVector.size()))
319 {
320 BOOST_LOG_TRIVIAL(fatal) << "input-name and input-tensor-data must have the same amount of elements.";
321 return EXIT_FAILURE;
322 }
323
324 if (inputTypesVector.size() == 0)
325 {
326 //Defaults the value of all inputs to "float"
327 for(unsigned int i = 0; i < inputNamesVector.size(); ++i)
surmeh013537c2c2018-05-18 16:31:43 +0100328 {
Ferran Balaguerc602f292019-02-08 17:09:55 +0000329 inputTypesVector.push_back("float");
surmeh013537c2c2018-05-18 16:31:43 +0100330 }
Ferran Balaguerc602f292019-02-08 17:09:55 +0000331 }
332 else if ((inputTypesVector.size() != 0) && (inputTypesVector.size() != inputNamesVector.size()))
333 {
334 BOOST_LOG_TRIVIAL(fatal) << "input-name and input-type must have the same amount of elements.";
335 return EXIT_FAILURE;
336 }
337
338 // Parse input tensor shape from the string we got from the command-line.
339 std::vector<std::unique_ptr<armnn::TensorShape>> inputTensorShapes;
340
341 if (!inputTensorShapesVector.empty())
342 {
343 inputTensorShapes.reserve(inputTensorShapesVector.size());
344
345 for(const std::string& shape : inputTensorShapesVector)
surmeh013537c2c2018-05-18 16:31:43 +0100346 {
Ferran Balaguerc602f292019-02-08 17:09:55 +0000347 std::stringstream ss(shape);
348 std::vector<unsigned int> dims = ParseArray<unsigned int>(ss);
349
350 try
351 {
352 // Coverity fix: An exception of type armnn::InvalidArgumentException is thrown and never caught.
353 inputTensorShapes.push_back(std::make_unique<armnn::TensorShape>(dims.size(), dims.data()));
354 }
355 catch (const armnn::InvalidArgumentException& e)
356 {
357 BOOST_LOG_TRIVIAL(fatal) << "Cannot create tensor shape: " << e.what();
358 return EXIT_FAILURE;
359 }
surmeh013537c2c2018-05-18 16:31:43 +0100360 }
telsoa014fcda012018-03-09 14:13:49 +0000361 }
362
363 // Forward to implementation based on the parser type
364 if (modelFormat.find("caffe") != std::string::npos)
365 {
366#if defined(ARMNN_CAFFE_PARSER)
367 return MainImpl<armnnCaffeParser::ICaffeParser, float>(modelPath.c_str(), isModelBinary, computeDevice,
Ferran Balaguerc602f292019-02-08 17:09:55 +0000368 inputNamesVector, inputTensorShapes,
369 inputTensorDataFilePathsVector, inputTypesVector,
370 outputNamesVector, enableProfiling, subgraphId, runtime);
telsoa014fcda012018-03-09 14:13:49 +0000371#else
372 BOOST_LOG_TRIVIAL(fatal) << "Not built with Caffe parser support.";
telsoa01c577f2c2018-08-31 09:22:23 +0100373 return EXIT_FAILURE;
374#endif
375 }
376 else if (modelFormat.find("onnx") != std::string::npos)
377{
378#if defined(ARMNN_ONNX_PARSER)
379 return MainImpl<armnnOnnxParser::IOnnxParser, float>(modelPath.c_str(), isModelBinary, computeDevice,
Ferran Balaguerc602f292019-02-08 17:09:55 +0000380 inputNamesVector, inputTensorShapes,
381 inputTensorDataFilePathsVector, inputTypesVector,
382 outputNamesVector, enableProfiling, subgraphId, runtime);
telsoa01c577f2c2018-08-31 09:22:23 +0100383#else
384 BOOST_LOG_TRIVIAL(fatal) << "Not built with Onnx parser support.";
385 return EXIT_FAILURE;
telsoa014fcda012018-03-09 14:13:49 +0000386#endif
387 }
388 else if (modelFormat.find("tensorflow") != std::string::npos)
389 {
surmeh01bceff2f2018-03-29 16:29:27 +0100390#if defined(ARMNN_TF_PARSER)
391 return MainImpl<armnnTfParser::ITfParser, float>(modelPath.c_str(), isModelBinary, computeDevice,
Ferran Balaguerc602f292019-02-08 17:09:55 +0000392 inputNamesVector, inputTensorShapes,
393 inputTensorDataFilePathsVector, inputTypesVector,
394 outputNamesVector, enableProfiling, subgraphId, runtime);
surmeh01bceff2f2018-03-29 16:29:27 +0100395#else
telsoa014fcda012018-03-09 14:13:49 +0000396 BOOST_LOG_TRIVIAL(fatal) << "Not built with Tensorflow parser support.";
telsoa01c577f2c2018-08-31 09:22:23 +0100397 return EXIT_FAILURE;
398#endif
399 }
400 else if(modelFormat.find("tflite") != std::string::npos)
401 {
402#if defined(ARMNN_TF_LITE_PARSER)
403 if (! isModelBinary)
404 {
405 BOOST_LOG_TRIVIAL(fatal) << "Unknown model format: '" << modelFormat << "'. Only 'binary' format supported \
406 for tflite files";
407 return EXIT_FAILURE;
408 }
409 return MainImpl<armnnTfLiteParser::ITfLiteParser, float>(modelPath.c_str(), isModelBinary, computeDevice,
Ferran Balaguerc602f292019-02-08 17:09:55 +0000410 inputNamesVector, inputTensorShapes,
411 inputTensorDataFilePathsVector, inputTypesVector,
412 outputNamesVector, enableProfiling, subgraphId,
413 runtime);
telsoa01c577f2c2018-08-31 09:22:23 +0100414#else
415 BOOST_LOG_TRIVIAL(fatal) << "Unknown model format: '" << modelFormat <<
416 "'. Please include 'caffe', 'tensorflow', 'tflite' or 'onnx'";
417 return EXIT_FAILURE;
surmeh01bceff2f2018-03-29 16:29:27 +0100418#endif
telsoa014fcda012018-03-09 14:13:49 +0000419 }
420 else
421 {
422 BOOST_LOG_TRIVIAL(fatal) << "Unknown model format: '" << modelFormat <<
telsoa01c577f2c2018-08-31 09:22:23 +0100423 "'. Please include 'caffe', 'tensorflow', 'tflite' or 'onnx'";
424 return EXIT_FAILURE;
425 }
426}
427
428int RunCsvTest(const armnnUtils::CsvRow &csvRow,
Nina Drozd549ae372018-09-10 14:26:44 +0100429 const std::shared_ptr<armnn::IRuntime>& runtime, const bool enableProfiling)
telsoa01c577f2c2018-08-31 09:22:23 +0100430{
431 std::string modelFormat;
432 std::string modelPath;
Ferran Balaguerc602f292019-02-08 17:09:55 +0000433 std::string inputNames;
434 std::string inputTensorShapes;
435 std::string inputTensorDataFilePaths;
436 std::string outputNames;
437 std::string inputTypes;
telsoa01c577f2c2018-08-31 09:22:23 +0100438
439 size_t subgraphId = 0;
440
Aron Virginas-Tar5cc8e562018-10-23 15:14:46 +0100441 const std::string backendsMessage = std::string("The preferred order of devices to run layers on by default. ")
442 + std::string("Possible choices: ")
443 + armnn::BackendRegistryInstance().GetBackendIdsAsString();
444
telsoa01c577f2c2018-08-31 09:22:23 +0100445 po::options_description desc("Options");
446 try
447 {
448 desc.add_options()
449 ("model-format,f", po::value(&modelFormat),
450 "caffe-binary, caffe-text, tflite-binary, onnx-binary, onnx-text, tensorflow-binary or tensorflow-text.")
451 ("model-path,m", po::value(&modelPath), "Path to model file, e.g. .caffemodel, .prototxt, .tflite,"
452 " .onnx")
David Beckf0b48452018-10-19 15:20:56 +0100453 ("compute,c", po::value<std::vector<armnn::BackendId>>()->multitoken(),
Aron Virginas-Tar5cc8e562018-10-23 15:14:46 +0100454 backendsMessage.c_str())
Ferran Balaguerc602f292019-02-08 17:09:55 +0000455 ("input-name,i", po::value(&inputNames), "Identifier of the input tensors in the network separated by comma.")
telsoa01c577f2c2018-08-31 09:22:23 +0100456 ("subgraph-number,n", po::value<size_t>(&subgraphId)->default_value(0), "Id of the subgraph to be "
Ferran Balaguerc602f292019-02-08 17:09:55 +0000457 "executed. Defaults to 0.")
458 ("input-tensor-shape,s", po::value(&inputTensorShapes),
459 "The shape of the input tensors in the network as a flat array of integers separated by comma. "
460 "Several shapes can be passed separating them by semicolon. "
telsoa01c577f2c2018-08-31 09:22:23 +0100461 "This parameter is optional, depending on the network.")
Ferran Balaguerc602f292019-02-08 17:09:55 +0000462 ("input-tensor-data,d", po::value(&inputTensorDataFilePaths),
463 "Path to files containing the input data as a flat array separated by whitespace. "
464 "Several paths can be passed separating them by comma.")
465 ("input-type,y",po::value(&inputTypes), "The type of the input tensors in the network separated by comma. "
466 "If unset, defaults to \"float\" for all defined inputs. "
467 "Accepted values (float or int).")
468 ("output-name,o", po::value(&outputNames),
469 "Identifier of the output tensors in the network separated by comma.");
telsoa01c577f2c2018-08-31 09:22:23 +0100470 }
471 catch (const std::exception& e)
472 {
473 // Coverity points out that default_value(...) can throw a bad_lexical_cast,
474 // and that desc.add_options() can throw boost::io::too_few_args.
475 // They really won't in any of these cases.
476 BOOST_ASSERT_MSG(false, "Caught unexpected exception");
477 BOOST_LOG_TRIVIAL(fatal) << "Fatal internal error: " << e.what();
478 return EXIT_FAILURE;
479 }
480
481 std::vector<const char*> clOptions;
482 clOptions.reserve(csvRow.values.size());
483 for (const std::string& value : csvRow.values)
484 {
485 clOptions.push_back(value.c_str());
486 }
487
488 po::variables_map vm;
489 try
490 {
491 po::store(po::parse_command_line(static_cast<int>(clOptions.size()), clOptions.data(), desc), vm);
492
493 po::notify(vm);
494
495 CheckOptionDependencies(vm);
496 }
497 catch (const po::error& e)
498 {
499 std::cerr << e.what() << std::endl << std::endl;
500 std::cerr << desc << std::endl;
501 return EXIT_FAILURE;
502 }
503
telsoa01c577f2c2018-08-31 09:22:23 +0100504 // Get the preferred order of compute devices.
David Beckf0b48452018-10-19 15:20:56 +0100505 std::vector<armnn::BackendId> computeDevices = vm["compute"].as<std::vector<armnn::BackendId>>();
telsoa01c577f2c2018-08-31 09:22:23 +0100506
507 // Remove duplicates from the list of compute devices.
508 RemoveDuplicateDevices(computeDevices);
509
510 // Check that the specified compute devices are valid.
Aron Virginas-Tar5cc8e562018-10-23 15:14:46 +0100511 std::string invalidBackends;
512 if (!CheckRequestedBackendsAreValid(computeDevices, armnn::Optional<std::string&>(invalidBackends)))
telsoa01c577f2c2018-08-31 09:22:23 +0100513 {
Aron Virginas-Tar5cc8e562018-10-23 15:14:46 +0100514 BOOST_LOG_TRIVIAL(fatal) << "The list of preferred devices contains invalid backend IDs: "
515 << invalidBackends;
telsoa01c577f2c2018-08-31 09:22:23 +0100516 return EXIT_FAILURE;
517 }
518
Ferran Balaguerc602f292019-02-08 17:09:55 +0000519 return RunTest(modelFormat, inputTensorShapes, computeDevices, modelPath, inputNames, inputTensorDataFilePaths,
520 inputTypes, outputNames, enableProfiling, subgraphId);
telsoa01c577f2c2018-08-31 09:22:23 +0100521}
522
// Entry point. Either runs a batch of test cases read from a CSV file
// (optionally in parallel), or runs a single network described entirely on the
// command line. Returns EXIT_SUCCESS only if every executed test succeeded.
int main(int argc, const char* argv[])
{
    // Configures logging for both the ARMNN library and this test program.
#ifdef NDEBUG
    armnn::LogSeverity level = armnn::LogSeverity::Info;
#else
    armnn::LogSeverity level = armnn::LogSeverity::Debug;
#endif
    armnn::ConfigureLogging(true, true, level);
    armnnUtils::ConfigureLogging(boost::log::core::get().get(), true, true, level);

    // Path to an optional CSV file of test cases; when set, per-model options below are ignored.
    std::string testCasesFile;

    // Raw comma/semicolon-separated option strings; parsed and validated later by RunTest().
    std::string modelFormat;
    std::string modelPath;
    std::string inputNames;
    std::string inputTensorShapes;
    std::string inputTensorDataFilePaths;
    std::string outputNames;
    std::string inputTypes;

    size_t subgraphId = 0;

    const std::string backendsMessage = "Which device to run layers on by default. Possible choices: "
                                      + armnn::BackendRegistryInstance().GetBackendIdsAsString();

    po::options_description desc("Options");
    try
    {
        desc.add_options()
            ("help", "Display usage information")
            ("test-cases,t", po::value(&testCasesFile), "Path to a CSV file containing test cases to run. "
             "If set, further parameters -- with the exception of compute device and concurrency -- will be ignored, "
             "as they are expected to be defined in the file for each test in particular.")
            ("concurrent,n", po::bool_switch()->default_value(false),
             "Whether or not the test cases should be executed in parallel")
            ("model-format,f", po::value(&modelFormat)->required(),
             "caffe-binary, caffe-text, onnx-binary, onnx-text, tflite-binary, tensorflow-binary or tensorflow-text.")
            ("model-path,m", po::value(&modelPath)->required(), "Path to model file, e.g. .caffemodel, .prototxt,"
             " .tflite, .onnx")
            ("compute,c", po::value<std::vector<std::string>>()->multitoken(),
             backendsMessage.c_str())
            ("input-name,i", po::value(&inputNames),
             "Identifier of the input tensors in the network separated by comma.")
            ("subgraph-number,x", po::value<size_t>(&subgraphId)->default_value(0), "Id of the subgraph to be executed."
             "Defaults to 0")
            ("input-tensor-shape,s", po::value(&inputTensorShapes),
             "The shape of the input tensors in the network as a flat array of integers separated by comma. "
             "Several shapes can be passed separating them by semicolon. "
             "This parameter is optional, depending on the network.")
            ("input-tensor-data,d", po::value(&inputTensorDataFilePaths),
             "Path to files containing the input data as a flat array separated by whitespace. "
             "Several paths can be passed separating them by comma. ")
            ("input-type,y",po::value(&inputTypes), "The type of the input tensors in the network separated by comma. "
             "If unset, defaults to \"float\" for all defined inputs. "
             "Accepted values (float or int)")
            ("output-name,o", po::value(&outputNames),
             "Identifier of the output tensors in the network separated by comma.")
            ("event-based-profiling,e", po::bool_switch()->default_value(false),
             "Enables built in profiler. If unset, defaults to off.");
    }
    catch (const std::exception& e)
    {
        // Coverity points out that default_value(...) can throw a bad_lexical_cast,
        // and that desc.add_options() can throw boost::io::too_few_args.
        // They really won't in any of these cases.
        BOOST_ASSERT_MSG(false, "Caught unexpected exception");
        BOOST_LOG_TRIVIAL(fatal) << "Fatal internal error: " << e.what();
        return EXIT_FAILURE;
    }

    // Parses the command-line.
    po::variables_map vm;
    try
    {
        po::store(po::parse_command_line(argc, argv, desc), vm);

        if (CheckOption(vm, "help") || argc <= 1)
        {
            std::cout << "Executes a neural network model using the provided input tensor. " << std::endl;
            std::cout << "Prints the resulting output tensor." << std::endl;
            std::cout << std::endl;
            std::cout << desc << std::endl;
            return EXIT_SUCCESS;
        }

        // notify() enforces required() options, so it must run after the help short-circuit.
        po::notify(vm);
    }
    catch (const po::error& e)
    {
        std::cerr << e.what() << std::endl << std::endl;
        std::cerr << desc << std::endl;
        return EXIT_FAILURE;
    }

    // Get the value of the switch arguments.
    bool concurrent = vm["concurrent"].as<bool>();
    bool enableProfiling = vm["event-based-profiling"].as<bool>();

    // Check whether we have to load test cases from a file.
    if (CheckOption(vm, "test-cases"))
    {
        // Check that the file exists.
        if (!boost::filesystem::exists(testCasesFile))
        {
            BOOST_LOG_TRIVIAL(fatal) << "Given file \"" << testCasesFile << "\" does not exist";
            return EXIT_FAILURE;
        }

        // Parse CSV file and extract test cases
        armnnUtils::CsvReader reader;
        std::vector<armnnUtils::CsvRow> testCases = reader.ParseFile(testCasesFile);

        // Check that there is at least one test case to run
        if (testCases.empty())
        {
            BOOST_LOG_TRIVIAL(fatal) << "Given file \"" << testCasesFile << "\" has no test cases";
            return EXIT_FAILURE;
        }

        // Create runtime (shared by all test cases in this batch)
        armnn::IRuntime::CreationOptions options;
        options.m_EnableGpuProfiling = enableProfiling;

        std::shared_ptr<armnn::IRuntime> runtime(armnn::IRuntime::Create(options));

        const std::string executableName("ExecuteNetwork");

        // Check whether we need to run the test cases concurrently
        if (concurrent)
        {
            std::vector<std::future<int>> results;
            results.reserve(testCases.size());

            // Run each test case in its own thread
            for (auto& testCase : testCases)
            {
                // Prepend the executable name so each row parses like a real argv.
                testCase.values.insert(testCase.values.begin(), executableName);
                results.push_back(std::async(std::launch::async, RunCsvTest, std::cref(testCase), std::cref(runtime),
                                             enableProfiling));
            }

            // Check results — get() blocks until the corresponding test finishes.
            for (auto& result : results)
            {
                if (result.get() != EXIT_SUCCESS)
                {
                    return EXIT_FAILURE;
                }
            }
        }
        else
        {
            // Run tests sequentially
            for (auto& testCase : testCases)
            {
                testCase.values.insert(testCase.values.begin(), executableName);
                if (RunCsvTest(testCase, runtime, enableProfiling) != EXIT_SUCCESS)
                {
                    return EXIT_FAILURE;
                }
            }
        }

        return EXIT_SUCCESS;
    }
    else // Run single test
    {
        // Get the preferred order of compute devices. If none are specified, default to using CpuRef
        const std::string computeOption("compute");
        std::vector<std::string> computeDevicesAsStrings = CheckOption(vm, computeOption.c_str()) ?
            vm[computeOption].as<std::vector<std::string>>() :
            std::vector<std::string>({ "CpuRef" });
        std::vector<armnn::BackendId> computeDevices(computeDevicesAsStrings.begin(), computeDevicesAsStrings.end());

        // Remove duplicates from the list of compute devices.
        RemoveDuplicateDevices(computeDevices);

        // Check that the specified compute devices are valid.
        std::string invalidBackends;
        if (!CheckRequestedBackendsAreValid(computeDevices, armnn::Optional<std::string&>(invalidBackends)))
        {
            BOOST_LOG_TRIVIAL(fatal) << "The list of preferred devices contains invalid backend IDs: "
                                     << invalidBackends;
            return EXIT_FAILURE;
        }

        try
        {
            CheckOptionDependencies(vm);
        }
        catch (const po::error& e)
        {
            std::cerr << e.what() << std::endl << std::endl;
            std::cerr << desc << std::endl;
            return EXIT_FAILURE;
        }

        // No explicit runtime is passed: RunTest/InferenceModel will create one internally.
        return RunTest(modelFormat, inputTensorShapes, computeDevices, modelPath, inputNames, inputTensorDataFilePaths,
                       inputTypes, outputNames, enableProfiling, subgraphId);
    }
}