//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//
#include <armnn/ArmNN.hpp>

#if defined(ARMNN_CAFFE_PARSER)
#include <armnnCaffeParser/ICaffeParser.hpp>
#endif
#if defined(ARMNN_ONNX_PARSER)
#include <armnnOnnxParser/IOnnxParser.hpp>
#endif
#if defined(ARMNN_SERIALIZER)
#include <armnnSerializer/ISerializer.hpp>
#endif
#if defined(ARMNN_TF_PARSER)
#include <armnnTfParser/ITfParser.hpp>
#endif
#if defined(ARMNN_TF_LITE_PARSER)
#include <armnnTfLiteParser/ITfLiteParser.hpp>
#endif

#include <Logging.hpp>
#include <HeapProfiling.hpp>

#include <boost/format.hpp>
#include <boost/algorithm/string/split.hpp>
#include <boost/algorithm/string/classification.hpp>
#include <boost/numeric/conversion/cast.hpp>
#include <boost/program_options.hpp>

#include <cstdlib>
#include <fstream>
#include <iostream>
#include <map>
#include <sstream>

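// ArmnnConverter: loads a network from a Caffe, ONNX, TensorFlow or TensorFlow Lite
// model file (whichever parsers this build was compiled with) and serializes it with
// armnnSerializer to an ArmNN file.
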
namespace
{

namespace po = boost::program_options;

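// Parses comma-separated dimension values from the stream into a single
// armnn::TensorShape. Tokens that are not valid numbers are logged and ignored.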
armnn::TensorShape ParseTensorShape(std::istream& stream)
{
    std::vector<unsigned int> result;
    std::string line;

    while (std::getline(stream, line))
    {
        std::vector<std::string> tokens;
        try
        {
            // Coverity fix: boost::split() may throw an exception of type boost::bad_function_call.
            boost::split(tokens, line, boost::algorithm::is_any_of(","), boost::token_compress_on);
        }
        catch (const std::exception& e)
        {
            BOOST_LOG_TRIVIAL(error) << "An error occurred when splitting tokens: " << e.what();
            continue;
        }
        for (const std::string& token : tokens)
        {
            if (!token.empty())
            {
                try
                {
                    result.push_back(boost::numeric_cast<unsigned int>(std::stoi(token)));
                }
                catch (const std::exception&)
                {
                    BOOST_LOG_TRIVIAL(error) << "'" << token << "' is not a valid number. It has been ignored.";
                }
            }
        }
    }

    return armnn::TensorShape(boost::numeric_cast<unsigned int>(result.size()), result.data());
}

bool CheckOption(const po::variables_map& vm,
                 const char* option)
{
    if (option == nullptr)
    {
        return false;
    }

    // Check whether 'option' is provided.
    return vm.find(option) != vm.end();
}

void CheckOptionDependency(const po::variables_map& vm,
                           const char* option,
                           const char* required)
{
    if (option == nullptr || required == nullptr)
    {
        throw po::error("Invalid option to check dependency for");
    }

    // Check that if 'option' is provided, 'required' is also provided.
    if (CheckOption(vm, option) && !vm[option].defaulted())
    {
        if (!CheckOption(vm, required) || vm[required].defaulted())
        {
            throw po::error(std::string("Option '") + option + "' requires option '" + required + "'.");
        }
    }
}

void CheckOptionDependencies(const po::variables_map& vm)
{
    CheckOptionDependency(vm, "model-path", "model-format");
    CheckOptionDependency(vm, "model-path", "input-name");
    CheckOptionDependency(vm, "model-path", "output-name");
    CheckOptionDependency(vm, "input-tensor-shape", "model-path");
}

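// Parses the command line into the output parameters. Prints usage and exits on
// --help, and returns EXIT_FAILURE if the options are missing or inconsistent.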
int ParseCommandLineArgs(int argc, const char* argv[],
                         std::string& modelFormat,
                         std::string& modelPath,
                         std::vector<std::string>& inputNames,
                         std::vector<std::string>& inputTensorShapeStrs,
                         std::vector<std::string>& outputNames,
                         std::string& outputPath, bool& isModelBinary)
{
    po::options_description desc("Options");

    desc.add_options()
        ("help", "Display usage information")
        ("model-format,f", po::value(&modelFormat)->required(), "Format of the model file"
#if defined(ARMNN_CAFFE_PARSER)
         ", caffe-binary, caffe-text"
#endif
#if defined(ARMNN_ONNX_PARSER)
         ", onnx-binary, onnx-text"
#endif
#if defined(ARMNN_TF_PARSER)
         ", tensorflow-binary, tensorflow-text"
#endif
#if defined(ARMNN_TF_LITE_PARSER)
         ", tflite-binary"
#endif
         ".")
        ("model-path,m", po::value(&modelPath)->required(), "Path to model file.")
        ("input-name,i", po::value<std::vector<std::string>>()->multitoken(),
         "Identifier of the input tensors in the network, separated by whitespace.")
        ("input-tensor-shape,s", po::value<std::vector<std::string>>()->multitoken(),
         "The shape of the input tensor in the network as a flat array of integers, separated by comma."
         " Multiple shapes are separated by whitespace."
         " This parameter is optional, depending on the network.")
        ("output-name,o", po::value<std::vector<std::string>>()->multitoken(),
         "Identifier of the output tensor in the network.")
        ("output-path,p", po::value(&outputPath)->required(), "Path to serialize the network to.");

    po::variables_map vm;
    try
    {
        po::store(po::parse_command_line(argc, argv, desc), vm);

        if (CheckOption(vm, "help") || argc <= 1)
        {
            std::cout << "Convert a neural network model from the provided file to ArmNN format." << std::endl;
            std::cout << std::endl;
            std::cout << desc << std::endl;
            exit(EXIT_SUCCESS);
        }
        po::notify(vm);
    }
    catch (const po::error& e)
    {
        std::cerr << e.what() << std::endl << std::endl;
        std::cerr << desc << std::endl;
        return EXIT_FAILURE;
    }

    try
    {
        CheckOptionDependencies(vm);
    }
    catch (const po::error& e)
    {
        std::cerr << e.what() << std::endl << std::endl;
        std::cerr << desc << std::endl;
        return EXIT_FAILURE;
    }

    if (modelFormat.find("bin") != std::string::npos)
    {
        isModelBinary = true;
    }
    else if (modelFormat.find("text") != std::string::npos)
    {
        isModelBinary = false;
    }
    else
    {
        BOOST_LOG_TRIVIAL(fatal) << "Unknown model format: '" << modelFormat << "'. Please include 'binary' or 'text'";
        return EXIT_FAILURE;
    }

    inputNames = vm["input-name"].as<std::vector<std::string>>();
    if (CheckOption(vm, "input-tensor-shape"))
    {
        // 'input-tensor-shape' is optional; reading an option that was not supplied would throw.
        inputTensorShapeStrs = vm["input-tensor-shape"].as<std::vector<std::string>>();
    }
    outputNames = vm["output-name"].as<std::vector<std::string>>();

    return EXIT_SUCCESS;
}

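// Tag type used to dispatch ArmnnConverter::CreateNetwork to the overload for a
// specific parser at compile time.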
template<typename T>
struct ParserType
{
    typedef T parserType;
};

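// Parses the source model into an armnn::INetwork with the requested parser and
// serializes that network to the output file.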
class ArmnnConverter
{
public:
    ArmnnConverter(const std::string& modelPath,
                   const std::vector<std::string>& inputNames,
                   const std::vector<armnn::TensorShape>& inputShapes,
                   const std::vector<std::string>& outputNames,
                   const std::string& outputPath,
                   bool isModelBinary)
    : m_NetworkPtr(armnn::INetworkPtr(nullptr, [](armnn::INetwork *){})),
      m_ModelPath(modelPath),
      m_InputNames(inputNames),
      m_InputShapes(inputShapes),
      m_OutputNames(outputNames),
      m_OutputPath(outputPath),
      m_IsModelBinary(isModelBinary) {}

    bool Serialize()
    {
        if (m_NetworkPtr.get() == nullptr)
        {
            return false;
        }

        auto serializer(armnnSerializer::ISerializer::Create());

        serializer->Serialize(*m_NetworkPtr);

        std::ofstream file(m_OutputPath, std::ios::out | std::ios::binary);

        bool retVal = serializer->SaveSerializedToStream(file);

        return retVal;
    }

    template <typename IParser>
    bool CreateNetwork ()
    {
        return CreateNetwork (ParserType<IParser>());
    }

private:
    armnn::INetworkPtr m_NetworkPtr;
    std::string m_ModelPath;
    std::vector<std::string> m_InputNames;
    std::vector<armnn::TensorShape> m_InputShapes;
    std::vector<std::string> m_OutputNames;
    std::string m_OutputPath;
    bool m_IsModelBinary;

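    // Generic overload for parsers whose CreateNetworkFrom*File functions take the
    // input shapes and requested outputs explicitly (the Caffe and TensorFlow parsers).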
    template <typename IParser>
    bool CreateNetwork (ParserType<IParser>)
    {
        // Create a network from a file on disk
        auto parser(IParser::Create());

        std::map<std::string, armnn::TensorShape> inputShapes;
        if (!m_InputShapes.empty())
        {
            const size_t numInputShapes = m_InputShapes.size();
            const size_t numInputBindings = m_InputNames.size();
            if (numInputShapes < numInputBindings)
            {
                throw armnn::Exception(boost::str(boost::format(
                    "Not every input has its tensor shape specified: expected=%1%, got=%2%")
                    % numInputBindings % numInputShapes));
            }

            for (size_t i = 0; i < numInputShapes; i++)
            {
                inputShapes[m_InputNames[i]] = m_InputShapes[i];
            }
        }

        {
            ARMNN_SCOPED_HEAP_PROFILING("Parsing");
            m_NetworkPtr = (m_IsModelBinary ?
                parser->CreateNetworkFromBinaryFile(m_ModelPath.c_str(), inputShapes, m_OutputNames) :
                parser->CreateNetworkFromTextFile(m_ModelPath.c_str(), inputShapes, m_OutputNames));
        }

        return m_NetworkPtr.get() != nullptr;
    }

#if defined(ARMNN_TF_LITE_PARSER)
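    // The TfLite parser only reads binary flatbuffer models and takes neither input
    // shapes nor output names, so only the file path is passed; the number of supplied
    // shapes is still checked against the number of input names.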
    bool CreateNetwork (ParserType<armnnTfLiteParser::ITfLiteParser>)
    {
        // Create a network from a file on disk
        auto parser(armnnTfLiteParser::ITfLiteParser::Create());

        if (!m_InputShapes.empty())
        {
            const size_t numInputShapes = m_InputShapes.size();
            const size_t numInputBindings = m_InputNames.size();
            if (numInputShapes < numInputBindings)
            {
                throw armnn::Exception(boost::str(boost::format(
                    "Not every input has its tensor shape specified: expected=%1%, got=%2%")
                    % numInputBindings % numInputShapes));
            }
        }

        {
            ARMNN_SCOPED_HEAP_PROFILING("Parsing");
            m_NetworkPtr = parser->CreateNetworkFromBinaryFile(m_ModelPath.c_str());
        }

        return m_NetworkPtr.get() != nullptr;
    }
#endif

#if defined(ARMNN_ONNX_PARSER)
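    // The ONNX parser likewise takes only the file path; the supplied input shapes are
    // validated for count but not forwarded to the parser.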
    bool CreateNetwork (ParserType<armnnOnnxParser::IOnnxParser>)
    {
        // Create a network from a file on disk
        auto parser(armnnOnnxParser::IOnnxParser::Create());

        if (!m_InputShapes.empty())
        {
            const size_t numInputShapes = m_InputShapes.size();
            const size_t numInputBindings = m_InputNames.size();
            if (numInputShapes < numInputBindings)
            {
                throw armnn::Exception(boost::str(boost::format(
                    "Not every input has its tensor shape specified: expected=%1%, got=%2%")
                    % numInputBindings % numInputShapes));
            }
        }

        {
            ARMNN_SCOPED_HEAP_PROFILING("Parsing");
            m_NetworkPtr = (m_IsModelBinary ?
                parser->CreateNetworkFromBinaryFile(m_ModelPath.c_str()) :
                parser->CreateNetworkFromTextFile(m_ModelPath.c_str()));
        }

        return m_NetworkPtr.get() != nullptr;
    }
#endif

};

} // anonymous namespace

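// Entry point: configures logging, parses the command line, parses any input tensor
// shapes, builds the network with the parser matching the requested model format and
// serializes it to the output path.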
int main(int argc, const char* argv[])
{

#if (!defined(ARMNN_CAFFE_PARSER) \
     && !defined(ARMNN_ONNX_PARSER) \
     && !defined(ARMNN_TF_PARSER) \
     && !defined(ARMNN_TF_LITE_PARSER))
    BOOST_LOG_TRIVIAL(fatal) << "Not built with any of the supported parsers, Caffe, Onnx, Tensorflow, or TfLite.";
    return EXIT_FAILURE;
#endif

#if !defined(ARMNN_SERIALIZER)
    BOOST_LOG_TRIVIAL(fatal) << "Not built with Serializer support.";
    return EXIT_FAILURE;
#endif

#ifdef NDEBUG
    armnn::LogSeverity level = armnn::LogSeverity::Info;
#else
    armnn::LogSeverity level = armnn::LogSeverity::Debug;
#endif

    armnn::ConfigureLogging(true, true, level);
    armnnUtils::ConfigureLogging(boost::log::core::get().get(), true, true, level);

    std::string modelFormat;
    std::string modelPath;

    std::vector<std::string> inputNames;
    std::vector<std::string> inputTensorShapeStrs;
    std::vector<armnn::TensorShape> inputTensorShapes;

    std::vector<std::string> outputNames;
    std::string outputPath;

    bool isModelBinary = true;

    if (ParseCommandLineArgs(
        argc, argv, modelFormat, modelPath, inputNames, inputTensorShapeStrs, outputNames, outputPath, isModelBinary)
        != EXIT_SUCCESS)
    {
        return EXIT_FAILURE;
    }

    for (const std::string& shapeStr : inputTensorShapeStrs)
    {
        if (!shapeStr.empty())
        {
            std::stringstream ss(shapeStr);

            try
            {
                armnn::TensorShape shape = ParseTensorShape(ss);
                inputTensorShapes.push_back(shape);
            }
            catch (const armnn::InvalidArgumentException& e)
            {
                BOOST_LOG_TRIVIAL(fatal) << "Cannot create tensor shape: " << e.what();
                return EXIT_FAILURE;
            }
        }
    }

    ArmnnConverter converter(modelPath, inputNames, inputTensorShapes, outputNames, outputPath, isModelBinary);

    if (modelFormat.find("caffe") != std::string::npos)
    {
#if defined(ARMNN_CAFFE_PARSER)
        if (!converter.CreateNetwork<armnnCaffeParser::ICaffeParser>())
        {
            BOOST_LOG_TRIVIAL(fatal) << "Failed to load model from file";
            return EXIT_FAILURE;
        }
#else
        BOOST_LOG_TRIVIAL(fatal) << "Not built with Caffe parser support.";
        return EXIT_FAILURE;
#endif
    }
    else if (modelFormat.find("onnx") != std::string::npos)
    {
#if defined(ARMNN_ONNX_PARSER)
        if (!converter.CreateNetwork<armnnOnnxParser::IOnnxParser>())
        {
            BOOST_LOG_TRIVIAL(fatal) << "Failed to load model from file";
            return EXIT_FAILURE;
        }
#else
        BOOST_LOG_TRIVIAL(fatal) << "Not built with Onnx parser support.";
        return EXIT_FAILURE;
#endif
    }
    else if (modelFormat.find("tensorflow") != std::string::npos)
    {
#if defined(ARMNN_TF_PARSER)
        if (!converter.CreateNetwork<armnnTfParser::ITfParser>())
        {
            BOOST_LOG_TRIVIAL(fatal) << "Failed to load model from file";
            return EXIT_FAILURE;
        }
#else
        BOOST_LOG_TRIVIAL(fatal) << "Not built with Tensorflow parser support.";
        return EXIT_FAILURE;
#endif
    }
    else if (modelFormat.find("tflite") != std::string::npos)
    {
#if defined(ARMNN_TF_LITE_PARSER)
        if (!isModelBinary)
        {
            BOOST_LOG_TRIVIAL(fatal) << "Unknown model format: '" << modelFormat
                                     << "'. Only 'binary' format supported for tflite files";
            return EXIT_FAILURE;
        }

        if (!converter.CreateNetwork<armnnTfLiteParser::ITfLiteParser>())
        {
            BOOST_LOG_TRIVIAL(fatal) << "Failed to load model from file";
            return EXIT_FAILURE;
        }
#else
        BOOST_LOG_TRIVIAL(fatal) << "Not built with TfLite parser support.";
        return EXIT_FAILURE;
#endif
    }
    else
    {
        BOOST_LOG_TRIVIAL(fatal) << "Unknown model format: '" << modelFormat << "'";
        return EXIT_FAILURE;
    }

    if (!converter.Serialize())
    {
        BOOST_LOG_TRIVIAL(fatal) << "Failed to serialize model";
        return EXIT_FAILURE;
    }

    return EXIT_SUCCESS;
}