//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//
#pragma once
#include <armnn/ArmNN.hpp>

#if defined(ARMNN_SERIALIZER)
#include "armnnDeserializer/IDeserializer.hpp"
#endif
#if defined(ARMNN_TF_LITE_PARSER)
#include <armnnTfLiteParser/ITfLiteParser.hpp>
#endif
#if defined(ARMNN_ONNX_PARSER)
#include <armnnOnnxParser/IOnnxParser.hpp>
#endif

#include <HeapProfiling.hpp>

#include <backendsCommon/BackendRegistry.hpp>

#include <boost/algorithm/string/join.hpp>
#include <boost/exception/exception.hpp>
#include <boost/exception/diagnostic_information.hpp>
#include <boost/log/trivial.hpp>
#include <boost/format.hpp>
#include <boost/program_options.hpp>
#include <boost/filesystem.hpp>
#include <boost/lexical_cast.hpp>
#include <boost/variant.hpp>

#include <algorithm>
#include <chrono>
#include <iterator>
#include <fstream>
#include <map>
#include <string>
#include <vector>
#include <type_traits>

namespace
{

// Returns true when every requested backend ID is registered with the ArmNN backend registry.
// If an optional string reference is supplied, any invalid IDs are appended to it, comma-separated.
inline bool CheckRequestedBackendsAreValid(const std::vector<armnn::BackendId>& backendIds,
                                           armnn::Optional<std::string&> invalidBackendIds = armnn::EmptyOptional())
{
    if (backendIds.empty())
    {
        return false;
    }

    armnn::BackendIdSet validBackendIds = armnn::BackendRegistryInstance().GetBackendIds();

    bool allValid = true;
    for (const auto& backendId : backendIds)
    {
        if (std::find(validBackendIds.begin(), validBackendIds.end(), backendId) == validBackendIds.end())
        {
            allValid = false;
            if (invalidBackendIds)
            {
                if (!invalidBackendIds.value().empty())
                {
                    invalidBackendIds.value() += ", ";
                }
                invalidBackendIds.value() += backendId;
            }
        }
    }
    return allValid;
}

} // anonymous namespace

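// Usage sketch (not part of the original header; the requested backend names below are
// hypothetical examples): validating a user-supplied backend list before building a model,
// mirroring how the InferenceModel constructor further down uses this helper.
//
//     std::vector<armnn::BackendId> requested = { "CpuAcc", "GpuAcc", "CpuRef" };
//     std::string invalid;
//     if (!CheckRequestedBackendsAreValid(requested, armnn::Optional<std::string&>(invalid)))
//     {
//         throw armnn::Exception("Some backend IDs are invalid: " + invalid);
//     }
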
namespace InferenceModelInternal
{
using BindingPointInfo = armnn::BindingPointInfo;

using QuantizationParams = std::pair<float,int32_t>;

struct Params
{
    std::string m_ModelPath;
    std::vector<std::string> m_InputBindings;
    std::vector<armnn::TensorShape> m_InputShapes;
    std::vector<std::string> m_OutputBindings;
    std::vector<armnn::BackendId> m_ComputeDevices;
    size_t m_SubgraphId;
    bool m_IsModelBinary;
    bool m_VisualizePostOptimizationModel;
    bool m_EnableFp16TurboMode;

    Params()
        : m_ComputeDevices{"CpuRef"}
        , m_SubgraphId(0)
        , m_IsModelBinary(true)
        , m_VisualizePostOptimizationModel(false)
        , m_EnableFp16TurboMode(false)
    {}
};

} // namespace InferenceModelInternal

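// Usage sketch (not part of the original header; the model path and binding names are assumed
// examples): filling in InferenceModelInternal::Params for a binary model with one input and
// one output, overriding the CpuRef-only default compute device list.
//
//     InferenceModelInternal::Params params;
//     params.m_ModelPath      = "model.tflite";
//     params.m_InputBindings  = { "input" };
//     params.m_OutputBindings = { "output" };
//     params.m_ComputeDevices = { "CpuAcc", "CpuRef" };
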
template <typename IParser>
struct CreateNetworkImpl
{
public:
    using Params = InferenceModelInternal::Params;

    static armnn::INetworkPtr Create(const Params& params,
                                     std::vector<armnn::BindingPointInfo>& inputBindings,
                                     std::vector<armnn::BindingPointInfo>& outputBindings)
    {
        const std::string& modelPath = params.m_ModelPath;

        // Create a network from a file on disk
        auto parser(IParser::Create());

        std::map<std::string, armnn::TensorShape> inputShapes;
        if (!params.m_InputShapes.empty())
        {
            const size_t numInputShapes   = params.m_InputShapes.size();
            const size_t numInputBindings = params.m_InputBindings.size();
            if (numInputShapes < numInputBindings)
            {
                throw armnn::Exception(boost::str(boost::format(
                    "Not every input has its tensor shape specified: expected=%1%, got=%2%")
                    % numInputBindings % numInputShapes));
            }

            for (size_t i = 0; i < numInputShapes; i++)
            {
                inputShapes[params.m_InputBindings[i]] = params.m_InputShapes[i];
            }
        }

        std::vector<std::string> requestedOutputs = params.m_OutputBindings;
        armnn::INetworkPtr network{nullptr, [](armnn::INetwork *){}};

        {
            ARMNN_SCOPED_HEAP_PROFILING("Parsing");
            // Handle text and binary input differently by calling the corresponding parser function
            network = (params.m_IsModelBinary ?
                parser->CreateNetworkFromBinaryFile(modelPath.c_str(), inputShapes, requestedOutputs) :
                parser->CreateNetworkFromTextFile(modelPath.c_str(), inputShapes, requestedOutputs));
        }

        for (const std::string& inputLayerName : params.m_InputBindings)
        {
            inputBindings.push_back(parser->GetNetworkInputBindingInfo(inputLayerName));
        }

        for (const std::string& outputLayerName : params.m_OutputBindings)
        {
            outputBindings.push_back(parser->GetNetworkOutputBindingInfo(outputLayerName));
        }

        return network;
    }
};

#if defined(ARMNN_SERIALIZER)
template <>
struct CreateNetworkImpl<armnnDeserializer::IDeserializer>
{
public:
    using IParser = armnnDeserializer::IDeserializer;
    using Params = InferenceModelInternal::Params;

    static armnn::INetworkPtr Create(const Params& params,
                                     std::vector<armnn::BindingPointInfo>& inputBindings,
                                     std::vector<armnn::BindingPointInfo>& outputBindings)
    {
        auto parser(IParser::Create());
        BOOST_ASSERT(parser);

        armnn::INetworkPtr network{nullptr, [](armnn::INetwork *){}};

        {
            ARMNN_SCOPED_HEAP_PROFILING("Parsing");

            boost::system::error_code errorCode;
            boost::filesystem::path pathToFile(params.m_ModelPath);
            if (!boost::filesystem::exists(pathToFile, errorCode))
            {
                throw armnn::FileNotFoundException(boost::str(
                    boost::format("Cannot find the file (%1%) errorCode: %2% %3%") %
                    params.m_ModelPath %
                    errorCode %
                    CHECK_LOCATION().AsString()));
            }
            std::ifstream file(params.m_ModelPath, std::ios::binary);

            network = parser->CreateNetworkFromBinary(file);
        }

        unsigned int subgraphId = boost::numeric_cast<unsigned int>(params.m_SubgraphId);

        for (const std::string& inputLayerName : params.m_InputBindings)
        {
            armnnDeserializer::BindingPointInfo inputBinding =
                parser->GetNetworkInputBindingInfo(subgraphId, inputLayerName);
            inputBindings.push_back(std::make_pair(inputBinding.m_BindingId, inputBinding.m_TensorInfo));
        }

        for (const std::string& outputLayerName : params.m_OutputBindings)
        {
            armnnDeserializer::BindingPointInfo outputBinding =
                parser->GetNetworkOutputBindingInfo(subgraphId, outputLayerName);
            outputBindings.push_back(std::make_pair(outputBinding.m_BindingId, outputBinding.m_TensorInfo));
        }

        return network;
    }
};
#endif

#if defined(ARMNN_TF_LITE_PARSER)
template <>
struct CreateNetworkImpl<armnnTfLiteParser::ITfLiteParser>
{
public:
    using IParser = armnnTfLiteParser::ITfLiteParser;
    using Params = InferenceModelInternal::Params;

    static armnn::INetworkPtr Create(const Params& params,
                                     std::vector<armnn::BindingPointInfo>& inputBindings,
                                     std::vector<armnn::BindingPointInfo>& outputBindings)
    {
        const std::string& modelPath = params.m_ModelPath;

        // Create a network from a file on disk
        auto parser(IParser::Create());

        armnn::INetworkPtr network{nullptr, [](armnn::INetwork *){}};

        {
            ARMNN_SCOPED_HEAP_PROFILING("Parsing");
            network = parser->CreateNetworkFromBinaryFile(modelPath.c_str());
        }

        for (const std::string& inputLayerName : params.m_InputBindings)
        {
            armnn::BindingPointInfo inputBinding =
                parser->GetNetworkInputBindingInfo(params.m_SubgraphId, inputLayerName);
            inputBindings.push_back(inputBinding);
        }

        for (const std::string& outputLayerName : params.m_OutputBindings)
        {
            armnn::BindingPointInfo outputBinding =
                parser->GetNetworkOutputBindingInfo(params.m_SubgraphId, outputLayerName);
            outputBindings.push_back(outputBinding);
        }

        return network;
    }
};
#endif

#if defined(ARMNN_ONNX_PARSER)
template <>
struct CreateNetworkImpl<armnnOnnxParser::IOnnxParser>
{
public:
    using IParser = armnnOnnxParser::IOnnxParser;
    using Params = InferenceModelInternal::Params;
    using BindingPointInfo = InferenceModelInternal::BindingPointInfo;

    static armnn::INetworkPtr Create(const Params& params,
                                     std::vector<BindingPointInfo>& inputBindings,
                                     std::vector<BindingPointInfo>& outputBindings)
    {
        const std::string& modelPath = params.m_ModelPath;

        // Create a network from a file on disk
        auto parser(IParser::Create());

        armnn::INetworkPtr network{nullptr, [](armnn::INetwork *){}};

        {
            ARMNN_SCOPED_HEAP_PROFILING("Parsing");
            network = (params.m_IsModelBinary ?
                parser->CreateNetworkFromBinaryFile(modelPath.c_str()) :
                parser->CreateNetworkFromTextFile(modelPath.c_str()));
        }

        for (const std::string& inputLayerName : params.m_InputBindings)
        {
            BindingPointInfo inputBinding = parser->GetNetworkInputBindingInfo(inputLayerName);
            inputBindings.push_back(inputBinding);
        }

        for (const std::string& outputLayerName : params.m_OutputBindings)
        {
            BindingPointInfo outputBinding = parser->GetNetworkOutputBindingInfo(outputLayerName);
            outputBindings.push_back(outputBinding);
        }

        return network;
    }
};
#endif

template<typename TContainer>
inline armnn::InputTensors MakeInputTensors(
    const std::vector<armnn::BindingPointInfo>& inputBindings,
    const std::vector<TContainer>& inputDataContainers)
{
    armnn::InputTensors inputTensors;

    const size_t numInputs = inputBindings.size();
    if (numInputs != inputDataContainers.size())
    {
        throw armnn::Exception(boost::str(boost::format("Number of inputs does not match number of "
            "tensor data containers: %1% != %2%") % numInputs % inputDataContainers.size()));
    }

    for (size_t i = 0; i < numInputs; i++)
    {
        const armnn::BindingPointInfo& inputBinding = inputBindings[i];
        const TContainer& inputData = inputDataContainers[i];

        boost::apply_visitor([&](auto&& value)
        {
            if (value.size() != inputBinding.second.GetNumElements())
            {
                throw armnn::Exception("Input tensor has incorrect size");
            }

            armnn::ConstTensor inputTensor(inputBinding.second, value.data());
            inputTensors.push_back(std::make_pair(inputBinding.first, inputTensor));
        },
        inputData);
    }

    return inputTensors;
}

template<typename TContainer>
inline armnn::OutputTensors MakeOutputTensors(
    const std::vector<armnn::BindingPointInfo>& outputBindings,
    std::vector<TContainer>& outputDataContainers)
{
    armnn::OutputTensors outputTensors;

    const size_t numOutputs = outputBindings.size();
    if (numOutputs != outputDataContainers.size())
    {
        throw armnn::Exception(boost::str(boost::format("Number of outputs does not match number of "
            "tensor data containers: %1% != %2%") % numOutputs % outputDataContainers.size()));
    }

    for (size_t i = 0; i < numOutputs; i++)
    {
        const armnn::BindingPointInfo& outputBinding = outputBindings[i];
        TContainer& outputData = outputDataContainers[i];

        boost::apply_visitor([&](auto&& value)
        {
            if (value.size() != outputBinding.second.GetNumElements())
            {
                throw armnn::Exception("Output tensor has incorrect size");
            }

            armnn::Tensor outputTensor(outputBinding.second, value.data());
            outputTensors.push_back(std::make_pair(outputBinding.first, outputTensor));
        },
        outputData);
    }

    return outputTensors;
}

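// Usage sketch (not part of the original header; assumes inputBindings/outputBindings were
// obtained from CreateNetworkImpl<...>::Create): wrapping variant-typed data containers into
// the armnn tensor lists expected by IRuntime::EnqueueWorkload.
//
//     using TContainer = boost::variant<std::vector<float>, std::vector<int>, std::vector<unsigned char>>;
//     std::vector<TContainer> inputData  = { std::vector<float>(inputBindings[0].second.GetNumElements()) };
//     std::vector<TContainer> outputData = { std::vector<float>(outputBindings[0].second.GetNumElements()) };
//     armnn::InputTensors  inputTensors  = MakeInputTensors(inputBindings, inputData);
//     armnn::OutputTensors outputTensors = MakeOutputTensors(outputBindings, outputData);
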
template <typename IParser, typename TDataType>
class InferenceModel
{
public:
    using DataType = TDataType;
    using Params = InferenceModelInternal::Params;
    using QuantizationParams = InferenceModelInternal::QuantizationParams;
    using TContainer = boost::variant<std::vector<float>, std::vector<int>, std::vector<unsigned char>>;

    struct CommandLineOptions
    {
        std::string m_ModelDir;
        std::vector<std::string> m_ComputeDevices;
        bool m_VisualizePostOptimizationModel;
        bool m_EnableFp16TurboMode;
        std::string m_Labels;

        std::vector<armnn::BackendId> GetComputeDevicesAsBackendIds()
        {
            std::vector<armnn::BackendId> backendIds;
            std::copy(m_ComputeDevices.begin(), m_ComputeDevices.end(), std::back_inserter(backendIds));
            return backendIds;
        }
    };

    static void AddCommandLineOptions(boost::program_options::options_description& desc, CommandLineOptions& options)
    {
        namespace po = boost::program_options;

        const std::vector<std::string> defaultComputes = { "CpuAcc", "CpuRef" };

        const std::string backendsMessage = "Which device to run layers on by default. Possible choices: "
                                          + armnn::BackendRegistryInstance().GetBackendIdsAsString();

        desc.add_options()
            ("model-dir,m", po::value<std::string>(&options.m_ModelDir)->required(),
                "Path to directory containing model files (.caffemodel/.prototxt/.tflite)")
            ("compute,c", po::value<std::vector<std::string>>(&options.m_ComputeDevices)->
                default_value(defaultComputes, boost::algorithm::join(defaultComputes, ", "))->
                multitoken(), backendsMessage.c_str())
            ("labels,l", po::value<std::string>(&options.m_Labels),
                "Text file containing one image filename - correct label pair per line, "
                "used to test the accuracy of the network.")
            ("visualize-optimized-model,v",
                po::value<bool>(&options.m_VisualizePostOptimizationModel)->default_value(false),
                "Produce a dot file useful for visualizing the graph post optimization. "
                "The file will have the same name as the model with the .dot extension.")
            ("fp16-turbo-mode", po::value<bool>(&options.m_EnableFp16TurboMode)->default_value(false),
                "If this option is enabled, FP32 layers, weights and biases will be converted "
                "to FP16 where the backend supports it.");
    }

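    // Usage sketch (not part of the original header; argc/argv and the parser type are assumed):
    // registering these options on a boost::program_options description and parsing the command line.
    //
    //     using Model = InferenceModel<armnnTfLiteParser::ITfLiteParser, float>;
    //     Model::CommandLineOptions cmdOptions;
    //     boost::program_options::options_description desc("Options");
    //     Model::AddCommandLineOptions(desc, cmdOptions);
    //     boost::program_options::variables_map vm;
    //     boost::program_options::store(boost::program_options::parse_command_line(argc, argv, desc), vm);
    //     boost::program_options::notify(vm);
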
    InferenceModel(const Params& params,
                   bool enableProfiling,
                   const std::shared_ptr<armnn::IRuntime>& runtime = nullptr)
        : m_EnableProfiling(enableProfiling)
    {
        if (runtime)
        {
            m_Runtime = runtime;
        }
        else
        {
            armnn::IRuntime::CreationOptions options;
            options.m_EnableGpuProfiling = m_EnableProfiling;
            m_Runtime = std::move(armnn::IRuntime::Create(options));
        }

        std::string invalidBackends;
        if (!CheckRequestedBackendsAreValid(params.m_ComputeDevices, armnn::Optional<std::string&>(invalidBackends)))
        {
            throw armnn::Exception("Some backend IDs are invalid: " + invalidBackends);
        }

        armnn::INetworkPtr network =
            CreateNetworkImpl<IParser>::Create(params, m_InputBindings, m_OutputBindings);

        armnn::IOptimizedNetworkPtr optNet{nullptr, [](armnn::IOptimizedNetwork *){}};
        {
            ARMNN_SCOPED_HEAP_PROFILING("Optimizing");

            armnn::OptimizerOptions options;
            options.m_ReduceFp32ToFp16 = params.m_EnableFp16TurboMode;

            optNet = armnn::Optimize(*network, params.m_ComputeDevices, m_Runtime->GetDeviceSpec(), options);
            if (!optNet)
            {
                throw armnn::Exception("Optimize returned nullptr");
            }
        }

        if (params.m_VisualizePostOptimizationModel)
        {
            boost::filesystem::path filename = params.m_ModelPath;
            filename.replace_extension("dot");
            std::fstream file(filename.c_str(), file.out);
            optNet->SerializeToDot(file);
        }

        armnn::Status ret;
        {
            ARMNN_SCOPED_HEAP_PROFILING("LoadNetwork");
            ret = m_Runtime->LoadNetwork(m_NetworkIdentifier, std::move(optNet));
        }

        if (ret == armnn::Status::Failure)
        {
            throw armnn::Exception("IRuntime::LoadNetwork failed");
        }
    }

    void CheckInputIndexIsValid(unsigned int inputIndex) const
    {
        if (m_InputBindings.size() < inputIndex + 1)
        {
            throw armnn::Exception(boost::str(boost::format("Input index out of range: %1%") % inputIndex));
        }
    }

    void CheckOutputIndexIsValid(unsigned int outputIndex) const
    {
        if (m_OutputBindings.size() < outputIndex + 1)
        {
            throw armnn::Exception(boost::str(boost::format("Output index out of range: %1%") % outputIndex));
        }
    }

    unsigned int GetOutputSize(unsigned int outputIndex = 0u) const
    {
        CheckOutputIndexIsValid(outputIndex);
        return m_OutputBindings[outputIndex].second.GetNumElements();
    }

    std::chrono::duration<double, std::milli> Run(
            const std::vector<TContainer>& inputContainers,
            std::vector<TContainer>& outputContainers)
    {
        for (unsigned int i = 0; i < outputContainers.size(); ++i)
        {
            const unsigned int expectedOutputDataSize = GetOutputSize(i);

            boost::apply_visitor([expectedOutputDataSize, i](auto&& value)
            {
                const unsigned int actualOutputDataSize = boost::numeric_cast<unsigned int>(value.size());
                if (actualOutputDataSize < expectedOutputDataSize)
                {
                    unsigned int outputIndex = boost::numeric_cast<unsigned int>(i);
                    throw armnn::Exception(
                        boost::str(boost::format("Not enough data for output #%1%: expected "
                        "%2% elements, got %3%") % outputIndex % expectedOutputDataSize % actualOutputDataSize));
                }
            },
            outputContainers[i]);
        }

        std::shared_ptr<armnn::IProfiler> profiler = m_Runtime->GetProfiler(m_NetworkIdentifier);
        if (profiler)
        {
            profiler->EnableProfiling(m_EnableProfiling);
        }

        // Start timer to record inference time of EnqueueWorkload (in milliseconds)
        const auto start_time = GetCurrentTime();

        armnn::Status ret = m_Runtime->EnqueueWorkload(m_NetworkIdentifier,
                                                       MakeInputTensors(inputContainers),
                                                       MakeOutputTensors(outputContainers));

        const auto end_time = GetCurrentTime();

        // If profiling is enabled, print out the results
        if (profiler && profiler->IsProfilingEnabled())
        {
            profiler->Print(std::cout);
        }

        if (ret == armnn::Status::Failure)
        {
            throw armnn::Exception("IRuntime::EnqueueWorkload failed");
        }
        else
        {
            return std::chrono::duration<double, std::milli>(end_time - start_time);
        }
    }

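    // Usage sketch (not part of the original header; a single float input and output are assumed):
    // sizing the variant containers from the bindings, running one inference, and reading the
    // EnqueueWorkload duration returned by Run().
    //
    //     std::vector<TContainer> inputs  = { std::vector<float>(model.GetInputBindingInfo().second.GetNumElements()) };
    //     std::vector<TContainer> outputs = { std::vector<float>(model.GetOutputSize()) };
    //     auto inferenceTime = model.Run(inputs, outputs);
    //     std::cout << "Inference took " << inferenceTime.count() << " ms" << std::endl;
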
    const armnn::BindingPointInfo& GetInputBindingInfo(unsigned int inputIndex = 0u) const
    {
        CheckInputIndexIsValid(inputIndex);
        return m_InputBindings[inputIndex];
    }

    const std::vector<armnn::BindingPointInfo>& GetInputBindingInfos() const
    {
        return m_InputBindings;
    }

    const armnn::BindingPointInfo& GetOutputBindingInfo(unsigned int outputIndex = 0u) const
    {
        CheckOutputIndexIsValid(outputIndex);
        return m_OutputBindings[outputIndex];
    }

    const std::vector<armnn::BindingPointInfo>& GetOutputBindingInfos() const
    {
        return m_OutputBindings;
    }

    QuantizationParams GetQuantizationParams(unsigned int outputIndex = 0u) const
    {
        CheckOutputIndexIsValid(outputIndex);
        return std::make_pair(m_OutputBindings[outputIndex].second.GetQuantizationScale(),
                              m_OutputBindings[outputIndex].second.GetQuantizationOffset());
    }

    QuantizationParams GetInputQuantizationParams(unsigned int inputIndex = 0u) const
    {
        CheckInputIndexIsValid(inputIndex);
        return std::make_pair(m_InputBindings[inputIndex].second.GetQuantizationScale(),
                              m_InputBindings[inputIndex].second.GetQuantizationOffset());
    }

    std::vector<QuantizationParams> GetAllQuantizationParams() const
    {
        std::vector<QuantizationParams> quantizationParams;
        for (unsigned int i = 0u; i < m_OutputBindings.size(); i++)
        {
            quantizationParams.push_back(GetQuantizationParams(i));
        }
        return quantizationParams;
    }

private:
    armnn::NetworkId m_NetworkIdentifier;
    std::shared_ptr<armnn::IRuntime> m_Runtime;

    std::vector<armnn::BindingPointInfo> m_InputBindings;
    std::vector<armnn::BindingPointInfo> m_OutputBindings;
    bool m_EnableProfiling;

    template<typename TContainer>
    armnn::InputTensors MakeInputTensors(const std::vector<TContainer>& inputDataContainers)
    {
        return ::MakeInputTensors(m_InputBindings, inputDataContainers);
    }

    template<typename TContainer>
    armnn::OutputTensors MakeOutputTensors(std::vector<TContainer>& outputDataContainers)
    {
        return ::MakeOutputTensors(m_OutputBindings, outputDataContainers);
    }

    std::chrono::high_resolution_clock::time_point GetCurrentTime()
    {
        return std::chrono::high_resolution_clock::now();
    }

    std::chrono::duration<double, std::milli> GetTimeDuration(
            std::chrono::high_resolution_clock::time_point& start_time,
            std::chrono::high_resolution_clock::time_point& end_time)
    {
        return std::chrono::duration<double, std::milli>(end_time - start_time);
    }

};
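
// Usage sketch (not part of the original header; the parser, model path, layer names and data
// type are assumed examples): constructing an InferenceModel end to end with the TfLite parser.
//
//     #if defined(ARMNN_TF_LITE_PARSER)
//     InferenceModelInternal::Params params;
//     params.m_ModelPath      = "mobilenet_v1.tflite";
//     params.m_InputBindings  = { "input" };
//     params.m_OutputBindings = { "MobilenetV1/Predictions/Reshape_1" };
//     params.m_ComputeDevices = { "CpuAcc", "CpuRef" };
//
//     InferenceModel<armnnTfLiteParser::ITfLiteParser, float> model(params, /*enableProfiling=*/false);
//     // ...prepare inputs/outputs as shown above, then call model.Run(inputs, outputs)...
//     #endif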