blob: 72c5edac299e60d13b85102cfc33925f8293a651 [file] [log] [blame]
telsoa014fcda012018-03-09 14:13:49 +00001//
2// Copyright © 2017 Arm Ltd. All rights reserved.
David Beckecb56cd2018-09-05 12:52:57 +01003// SPDX-License-Identifier: MIT
telsoa014fcda012018-03-09 14:13:49 +00004//
5#pragma once
David Beckf0b48452018-10-19 15:20:56 +01006#include <armnn/ArmNN.hpp>
telsoa01c577f2c2018-08-31 09:22:23 +01007
Aron Virginas-Tar64e4ccb2019-02-12 11:27:53 +00008#if defined(ARMNN_SERIALIZER)
Derek Lamberti0028d1b2019-02-20 13:57:42 +00009#include "armnnDeserializer/IDeserializer.hpp"
Aron Virginas-Tar64e4ccb2019-02-12 11:27:53 +000010#endif
telsoa01c577f2c2018-08-31 09:22:23 +010011#if defined(ARMNN_TF_LITE_PARSER)
David Beckf0b48452018-10-19 15:20:56 +010012#include <armnnTfLiteParser/ITfLiteParser.hpp>
telsoa01c577f2c2018-08-31 09:22:23 +010013#endif
telsoa01c577f2c2018-08-31 09:22:23 +010014#if defined(ARMNN_ONNX_PARSER)
David Beckf0b48452018-10-19 15:20:56 +010015#include <armnnOnnxParser/IOnnxParser.hpp>
telsoa01c577f2c2018-08-31 09:22:23 +010016#endif
telsoa014fcda012018-03-09 14:13:49 +000017
Aron Virginas-Tar64e4ccb2019-02-12 11:27:53 +000018#include <HeapProfiling.hpp>
19
David Beck1b61be52018-11-08 09:19:14 +000020#include <backendsCommon/BackendRegistry.hpp>
Aron Virginas-Tar5cc8e562018-10-23 15:14:46 +010021
Aron Virginas-Tar339bcae2019-01-31 16:44:26 +000022#include <boost/algorithm/string/join.hpp>
surmeh013537c2c2018-05-18 16:31:43 +010023#include <boost/exception/exception.hpp>
24#include <boost/exception/diagnostic_information.hpp>
telsoa014fcda012018-03-09 14:13:49 +000025#include <boost/log/trivial.hpp>
26#include <boost/format.hpp>
27#include <boost/program_options.hpp>
surmeh013537c2c2018-05-18 16:31:43 +010028#include <boost/filesystem.hpp>
David Beckf0b48452018-10-19 15:20:56 +010029#include <boost/lexical_cast.hpp>
Ferran Balaguerc602f292019-02-08 17:09:55 +000030#include <boost/variant.hpp>
telsoa014fcda012018-03-09 14:13:49 +000031
Aron Virginas-Tar339bcae2019-01-31 16:44:26 +000032#include <algorithm>
James Conroy7b4886f2019-04-11 10:23:58 +010033#include <chrono>
Aron Virginas-Tar339bcae2019-01-31 16:44:26 +000034#include <iterator>
Aron Virginas-Tar5cc8e562018-10-23 15:14:46 +010035#include <fstream>
telsoa014fcda012018-03-09 14:13:49 +000036#include <map>
37#include <string>
Aron Virginas-Tar7cf0eaa2019-01-24 17:05:36 +000038#include <vector>
telsoa01c577f2c2018-08-31 09:22:23 +010039#include <type_traits>
40
Aron Virginas-Tar5cc8e562018-10-23 15:14:46 +010041namespace
42{
43
44inline bool CheckRequestedBackendsAreValid(const std::vector<armnn::BackendId>& backendIds,
45 armnn::Optional<std::string&> invalidBackendIds = armnn::EmptyOptional())
46{
47 if (backendIds.empty())
48 {
49 return false;
50 }
51
52 armnn::BackendIdSet validBackendIds = armnn::BackendRegistryInstance().GetBackendIds();
53
54 bool allValid = true;
55 for (const auto& backendId : backendIds)
56 {
57 if (std::find(validBackendIds.begin(), validBackendIds.end(), backendId) == validBackendIds.end())
58 {
59 allValid = false;
60 if (invalidBackendIds)
61 {
62 if (!invalidBackendIds.value().empty())
63 {
64 invalidBackendIds.value() += ", ";
65 }
66 invalidBackendIds.value() += backendId;
67 }
68 }
69 }
70 return allValid;
71}
72
73} // anonymous namespace
74
telsoa01c577f2c2018-08-31 09:22:23 +010075namespace InferenceModelInternal
76{
77// This needs to go when the armnnCaffeParser, armnnTfParser and armnnTfLiteParser
78// definitions of BindingPointInfo gets consolidated.
79using BindingPointInfo = std::pair<armnn::LayerBindingId, armnn::TensorInfo>;
80
81using QuantizationParams = std::pair<float,int32_t>;
82
83struct Params
84{
Aron Virginas-Tar7cf0eaa2019-01-24 17:05:36 +000085 std::string m_ModelPath;
86 std::vector<std::string> m_InputBindings;
87 std::vector<armnn::TensorShape> m_InputShapes;
88 std::vector<std::string> m_OutputBindings;
Aron Virginas-Tar339bcae2019-01-31 16:44:26 +000089 std::vector<armnn::BackendId> m_ComputeDevices;
Aron Virginas-Tar7cf0eaa2019-01-24 17:05:36 +000090 size_t m_SubgraphId;
91 bool m_IsModelBinary;
92 bool m_VisualizePostOptimizationModel;
93 bool m_EnableFp16TurboMode;
telsoa01c577f2c2018-08-31 09:22:23 +010094
95 Params()
Aron Virginas-Tar339bcae2019-01-31 16:44:26 +000096 : m_ComputeDevices{"CpuRef"}
telsoa01c577f2c2018-08-31 09:22:23 +010097 , m_SubgraphId(0)
98 , m_IsModelBinary(true)
99 , m_VisualizePostOptimizationModel(false)
100 , m_EnableFp16TurboMode(false)
101 {}
102};
103
104} // namespace InferenceModelInternal
105
106template <typename IParser>
107struct CreateNetworkImpl
108{
109public:
110 using Params = InferenceModelInternal::Params;
111 using BindingPointInfo = InferenceModelInternal::BindingPointInfo;
112
113 static armnn::INetworkPtr Create(const Params& params,
Aron Virginas-Tar7cf0eaa2019-01-24 17:05:36 +0000114 std::vector<BindingPointInfo>& inputBindings,
115 std::vector<BindingPointInfo>& outputBindings)
telsoa01c577f2c2018-08-31 09:22:23 +0100116 {
Aron Virginas-Tar7cf0eaa2019-01-24 17:05:36 +0000117 const std::string& modelPath = params.m_ModelPath;
telsoa01c577f2c2018-08-31 09:22:23 +0100118
Aron Virginas-Tar7cf0eaa2019-01-24 17:05:36 +0000119 // Create a network from a file on disk
120 auto parser(IParser::Create());
telsoa01c577f2c2018-08-31 09:22:23 +0100121
Aron Virginas-Tar7cf0eaa2019-01-24 17:05:36 +0000122 std::map<std::string, armnn::TensorShape> inputShapes;
123 if (!params.m_InputShapes.empty())
124 {
125 const size_t numInputShapes = params.m_InputShapes.size();
126 const size_t numInputBindings = params.m_InputBindings.size();
127 if (numInputShapes < numInputBindings)
128 {
129 throw armnn::Exception(boost::str(boost::format(
130 "Not every input has its tensor shape specified: expected=%1%, got=%2%")
131 % numInputBindings % numInputShapes));
132 }
telsoa01c577f2c2018-08-31 09:22:23 +0100133
Aron Virginas-Tar7cf0eaa2019-01-24 17:05:36 +0000134 for (size_t i = 0; i < numInputShapes; i++)
135 {
136 inputShapes[params.m_InputBindings[i]] = params.m_InputShapes[i];
137 }
138 }
telsoa01c577f2c2018-08-31 09:22:23 +0100139
Aron Virginas-Tar7cf0eaa2019-01-24 17:05:36 +0000140 std::vector<std::string> requestedOutputs = params.m_OutputBindings;
141 armnn::INetworkPtr network{nullptr, [](armnn::INetwork *){}};
142
143 {
144 ARMNN_SCOPED_HEAP_PROFILING("Parsing");
145 // Handle text and binary input differently by calling the corresponding parser function
146 network = (params.m_IsModelBinary ?
147 parser->CreateNetworkFromBinaryFile(modelPath.c_str(), inputShapes, requestedOutputs) :
148 parser->CreateNetworkFromTextFile(modelPath.c_str(), inputShapes, requestedOutputs));
149 }
150
151 for (const std::string& inputLayerName : params.m_InputBindings)
152 {
153 inputBindings.push_back(parser->GetNetworkInputBindingInfo(inputLayerName));
154 }
155
156 for (const std::string& outputLayerName : params.m_OutputBindings)
157 {
158 outputBindings.push_back(parser->GetNetworkOutputBindingInfo(outputLayerName));
159 }
160
161 return network;
telsoa01c577f2c2018-08-31 09:22:23 +0100162 }
163};
164
#if defined(ARMNN_SERIALIZER)
/// Specialization for the ArmNN deserializer: loads a previously serialized
/// binary network and looks bindings up by (subgraph id, layer name).
template <>
struct CreateNetworkImpl<armnnDeserializer::IDeserializer>
{
public:
    using IParser = armnnDeserializer::IDeserializer;
    using Params = InferenceModelInternal::Params;
    using BindingPointInfo = InferenceModelInternal::BindingPointInfo;

    /// Deserializes the network at params.m_ModelPath and fills in the binding
    /// info for the requested inputs and outputs.
    /// @throws armnn::FileNotFoundException if the model file does not exist.
    static armnn::INetworkPtr Create(const Params& params,
                                     std::vector<BindingPointInfo>& inputBindings,
                                     std::vector<BindingPointInfo>& outputBindings)
    {
        auto parser(IParser::Create());
        BOOST_ASSERT(parser);

        armnn::INetworkPtr network{nullptr, [](armnn::INetwork *){}};

        {
            ARMNN_SCOPED_HEAP_PROFILING("Parsing");

            // Fail early with a descriptive error if the model file is missing.
            boost::system::error_code errorCode;
            boost::filesystem::path modelFile(params.m_ModelPath);
            if (!boost::filesystem::exists(modelFile, errorCode))
            {
                throw armnn::FileNotFoundException(boost::str(
                    boost::format("Cannot find the file (%1%) errorCode: %2% %3%") %
                    params.m_ModelPath %
                    errorCode %
                    CHECK_LOCATION().AsString()));
            }
            std::ifstream file(params.m_ModelPath, std::ios::binary);

            network = parser->CreateNetworkFromBinary(file);
        }

        const unsigned int subgraphId = boost::numeric_cast<unsigned int>(params.m_SubgraphId);

        for (const std::string& layerName : params.m_InputBindings)
        {
            armnnDeserializer::BindingPointInfo binding =
                parser->GetNetworkInputBindingInfo(subgraphId, layerName);
            inputBindings.push_back(std::make_pair(binding.m_BindingId, binding.m_TensorInfo));
        }

        for (const std::string& layerName : params.m_OutputBindings)
        {
            armnnDeserializer::BindingPointInfo binding =
                parser->GetNetworkOutputBindingInfo(subgraphId, layerName);
            outputBindings.push_back(std::make_pair(binding.m_BindingId, binding.m_TensorInfo));
        }

        return network;
    }
};
#endif
221
#if defined(ARMNN_TF_LITE_PARSER)
/// Specialization for the TfLite parser: binary-only models, bindings looked
/// up by (subgraph id, layer name).
template <>
struct CreateNetworkImpl<armnnTfLiteParser::ITfLiteParser>
{
public:
    using IParser = armnnTfLiteParser::ITfLiteParser;
    using Params = InferenceModelInternal::Params;
    using BindingPointInfo = InferenceModelInternal::BindingPointInfo;

    /// Parses the TfLite model at params.m_ModelPath and appends the binding
    /// info of the requested inputs/outputs in params.m_SubgraphId.
    static armnn::INetworkPtr Create(const Params& params,
                                     std::vector<BindingPointInfo>& inputBindings,
                                     std::vector<BindingPointInfo>& outputBindings)
    {
        // Create a network from a file on disk
        auto parser(IParser::Create());

        armnn::INetworkPtr network{nullptr, [](armnn::INetwork *){}};

        {
            ARMNN_SCOPED_HEAP_PROFILING("Parsing");
            network = parser->CreateNetworkFromBinaryFile(params.m_ModelPath.c_str());
        }

        for (const std::string& layerName : params.m_InputBindings)
        {
            inputBindings.push_back(
                parser->GetNetworkInputBindingInfo(params.m_SubgraphId, layerName));
        }

        for (const std::string& layerName : params.m_OutputBindings)
        {
            outputBindings.push_back(
                parser->GetNetworkOutputBindingInfo(params.m_SubgraphId, layerName));
        }

        return network;
    }
};
#endif
265
#if defined(ARMNN_ONNX_PARSER)
/// Specialization for the ONNX parser: no input-shape overrides and no
/// subgraph id — bindings are looked up by layer name only.
template <>
struct CreateNetworkImpl<armnnOnnxParser::IOnnxParser>
{
public:
    using IParser = armnnOnnxParser::IOnnxParser;
    using Params = InferenceModelInternal::Params;
    using BindingPointInfo = InferenceModelInternal::BindingPointInfo;

    /// Parses the ONNX model (text or binary) at params.m_ModelPath and fills
    /// in the binding info of the requested inputs and outputs.
    static armnn::INetworkPtr Create(const Params& params,
                                     std::vector<BindingPointInfo>& inputBindings,
                                     std::vector<BindingPointInfo>& outputBindings)
    {
        // Create a network from a file on disk
        auto parser(IParser::Create());

        armnn::INetworkPtr network{nullptr, [](armnn::INetwork *){}};

        {
            ARMNN_SCOPED_HEAP_PROFILING("Parsing");
            const char* modelPath = params.m_ModelPath.c_str();
            network = params.m_IsModelBinary
                ? parser->CreateNetworkFromBinaryFile(modelPath)
                : parser->CreateNetworkFromTextFile(modelPath);
        }

        for (const std::string& layerName : params.m_InputBindings)
        {
            inputBindings.push_back(parser->GetNetworkInputBindingInfo(layerName));
        }

        for (const std::string& layerName : params.m_OutputBindings)
        {
            outputBindings.push_back(parser->GetNetworkOutputBindingInfo(layerName));
        }

        return network;
    }
};
#endif
telsoa014fcda012018-03-09 14:13:49 +0000309
310template<typename TContainer>
Aron Virginas-Tar7cf0eaa2019-01-24 17:05:36 +0000311inline armnn::InputTensors MakeInputTensors(
312 const std::vector<InferenceModelInternal::BindingPointInfo>& inputBindings,
313 const std::vector<TContainer>& inputDataContainers)
telsoa014fcda012018-03-09 14:13:49 +0000314{
Aron Virginas-Tar7cf0eaa2019-01-24 17:05:36 +0000315 armnn::InputTensors inputTensors;
316
317 const size_t numInputs = inputBindings.size();
318 if (numInputs != inputDataContainers.size())
telsoa014fcda012018-03-09 14:13:49 +0000319 {
Aron Virginas-Tar7cf0eaa2019-01-24 17:05:36 +0000320 throw armnn::Exception(boost::str(boost::format("Number of inputs does not match number of "
321 "tensor data containers: %1% != %2%") % numInputs % inputDataContainers.size()));
telsoa014fcda012018-03-09 14:13:49 +0000322 }
Aron Virginas-Tar7cf0eaa2019-01-24 17:05:36 +0000323
324 for (size_t i = 0; i < numInputs; i++)
325 {
326 const InferenceModelInternal::BindingPointInfo& inputBinding = inputBindings[i];
327 const TContainer& inputData = inputDataContainers[i];
328
Ferran Balaguerc602f292019-02-08 17:09:55 +0000329 boost::apply_visitor([&](auto&& value)
330 {
331 if (value.size() != inputBinding.second.GetNumElements())
332 {
333 throw armnn::Exception("Input tensor has incorrect size");
334 }
Aron Virginas-Tar7cf0eaa2019-01-24 17:05:36 +0000335
Ferran Balaguerc602f292019-02-08 17:09:55 +0000336 armnn::ConstTensor inputTensor(inputBinding.second, value.data());
337 inputTensors.push_back(std::make_pair(inputBinding.first, inputTensor));
338 },
339 inputData);
Aron Virginas-Tar7cf0eaa2019-01-24 17:05:36 +0000340 }
341
342 return inputTensors;
telsoa014fcda012018-03-09 14:13:49 +0000343}
344
345template<typename TContainer>
Aron Virginas-Tar7cf0eaa2019-01-24 17:05:36 +0000346inline armnn::OutputTensors MakeOutputTensors(
347 const std::vector<InferenceModelInternal::BindingPointInfo>& outputBindings,
348 std::vector<TContainer>& outputDataContainers)
telsoa014fcda012018-03-09 14:13:49 +0000349{
Aron Virginas-Tar7cf0eaa2019-01-24 17:05:36 +0000350 armnn::OutputTensors outputTensors;
351
352 const size_t numOutputs = outputBindings.size();
353 if (numOutputs != outputDataContainers.size())
telsoa014fcda012018-03-09 14:13:49 +0000354 {
Aron Virginas-Tar7cf0eaa2019-01-24 17:05:36 +0000355 throw armnn::Exception(boost::str(boost::format("Number of outputs does not match number of "
356 "tensor data containers: %1% != %2%") % numOutputs % outputDataContainers.size()));
telsoa014fcda012018-03-09 14:13:49 +0000357 }
Aron Virginas-Tar7cf0eaa2019-01-24 17:05:36 +0000358
359 for (size_t i = 0; i < numOutputs; i++)
360 {
361 const InferenceModelInternal::BindingPointInfo& outputBinding = outputBindings[i];
362 TContainer& outputData = outputDataContainers[i];
363
Ferran Balaguerc602f292019-02-08 17:09:55 +0000364 boost::apply_visitor([&](auto&& value)
365 {
366 if (value.size() != outputBinding.second.GetNumElements())
367 {
368 throw armnn::Exception("Output tensor has incorrect size");
369 }
Aron Virginas-Tar7cf0eaa2019-01-24 17:05:36 +0000370
Ferran Balaguerc602f292019-02-08 17:09:55 +0000371 armnn::Tensor outputTensor(outputBinding.second, value.data());
372 outputTensors.push_back(std::make_pair(outputBinding.first, outputTensor));
373 },
374 outputData);
Aron Virginas-Tar7cf0eaa2019-01-24 17:05:36 +0000375 }
376
377 return outputTensors;
telsoa014fcda012018-03-09 14:13:49 +0000378}
379
/// End-to-end wrapper around an ArmNN network: parses the model via
/// CreateNetworkImpl<IParser>, optimizes and loads it into a runtime, and
/// exposes a Run() call that times EnqueueWorkload.
template <typename IParser, typename TDataType>
class InferenceModel
{
public:
    using DataType = TDataType;
    using Params = InferenceModelInternal::Params;
    using BindingPointInfo = InferenceModelInternal::BindingPointInfo;
    using QuantizationParams = InferenceModelInternal::QuantizationParams;
    // One container per tensor; the variant covers the element types supported here.
    using TContainer = boost::variant<std::vector<float>, std::vector<int>, std::vector<unsigned char>>;

    /// Options populated from the command line by AddCommandLineOptions().
    struct CommandLineOptions
    {
        std::string m_ModelDir;                        // directory containing the model files
        std::vector<std::string> m_ComputeDevices;     // backend names as typed by the user
        bool m_VisualizePostOptimizationModel;
        bool m_EnableFp16TurboMode;
        std::string m_Labels;                          // path of image/label pair file

        // Converts the raw device-name strings into armnn::BackendId values.
        std::vector<armnn::BackendId> GetComputeDevicesAsBackendIds()
        {
            std::vector<armnn::BackendId> backendIds;
            std::copy(m_ComputeDevices.begin(), m_ComputeDevices.end(), std::back_inserter(backendIds));
            return backendIds;
        }
    };

    /// Registers this model's command-line switches on desc, wiring each one
    /// to the corresponding field of options.
    static void AddCommandLineOptions(boost::program_options::options_description& desc, CommandLineOptions& options)
    {
        namespace po = boost::program_options;

        const std::vector<std::string> defaultComputes = { "CpuAcc", "CpuRef" };

        const std::string backendsMessage = "Which device to run layers on by default. Possible choices: "
                                          + armnn::BackendRegistryInstance().GetBackendIdsAsString();

        desc.add_options()
            ("model-dir,m", po::value<std::string>(&options.m_ModelDir)->required(),
                "Path to directory containing model files (.caffemodel/.prototxt/.tflite)")
            ("compute,c", po::value<std::vector<std::string>>(&options.m_ComputeDevices)->
                default_value(defaultComputes, boost::algorithm::join(defaultComputes, ", "))->
                multitoken(), backendsMessage.c_str())
            ("labels,l", po::value<std::string>(&options.m_Labels),
                "Text file containing one image filename - correct label pair per line, "
                "used to test the accuracy of the network.")
            ("visualize-optimized-model,v",
                po::value<bool>(&options.m_VisualizePostOptimizationModel)->default_value(false),
             "Produce a dot file useful for visualizing the graph post optimization."
                "The file will have the same name as the model with the .dot extention.")
            ("fp16-turbo-mode", po::value<bool>(&options.m_EnableFp16TurboMode)->default_value(false),
                "If this option is enabled FP32 layers, weights and biases will be converted "
                "to FP16 where the backend supports it.");
    }

    /// Parses, optimizes and loads the network described by params.
    /// @param params           model path, bindings and optimization settings.
    /// @param enableProfiling  enables GPU and ArmNN profiling output.
    /// @param runtime          optional shared runtime; a private one is
    ///                         created when none is supplied.
    /// @throws armnn::Exception on invalid backends, a failed Optimize, or a
    ///         failed LoadNetwork.
    InferenceModel(const Params& params,
                   bool enableProfiling,
                   const std::shared_ptr<armnn::IRuntime>& runtime = nullptr)
        : m_EnableProfiling(enableProfiling)
    {
        if (runtime)
        {
            m_Runtime = runtime;
        }
        else
        {
            armnn::IRuntime::CreationOptions options;
            options.m_EnableGpuProfiling = m_EnableProfiling;
            m_Runtime = std::move(armnn::IRuntime::Create(options));
        }

        // Validate the requested backends before doing any expensive work.
        std::string invalidBackends;
        if (!CheckRequestedBackendsAreValid(params.m_ComputeDevices, armnn::Optional<std::string&>(invalidBackends)))
        {
            throw armnn::Exception("Some backend IDs are invalid: " + invalidBackends);
        }

        // Parse the model; this also fills m_InputBindings/m_OutputBindings.
        armnn::INetworkPtr network =
            CreateNetworkImpl<IParser>::Create(params, m_InputBindings, m_OutputBindings);

        armnn::IOptimizedNetworkPtr optNet{nullptr, [](armnn::IOptimizedNetwork *){}};
        {
            ARMNN_SCOPED_HEAP_PROFILING("Optimizing");

            armnn::OptimizerOptions options;
            options.m_ReduceFp32ToFp16 = params.m_EnableFp16TurboMode;

            optNet = armnn::Optimize(*network, params.m_ComputeDevices, m_Runtime->GetDeviceSpec(), options);
            if (!optNet)
            {
                throw armnn::Exception("Optimize returned nullptr");
            }
        }

        if (params.m_VisualizePostOptimizationModel)
        {
            // Write <model>.dot next to the model file.
            boost::filesystem::path filename = params.m_ModelPath;
            filename.replace_extension("dot");
            // file.out names the static std::ios_base::out flag (valid before construction completes).
            std::fstream file(filename.c_str(),file.out);
            optNet->SerializeToDot(file);
        }

        armnn::Status ret;
        {
            ARMNN_SCOPED_HEAP_PROFILING("LoadNetwork");
            ret = m_Runtime->LoadNetwork(m_NetworkIdentifier, std::move(optNet));
        }

        if (ret == armnn::Status::Failure)
        {
            throw armnn::Exception("IRuntime::LoadNetwork failed");
        }
    }

    /// Throws armnn::Exception if inputIndex is not a valid input binding index.
    void CheckInputIndexIsValid(unsigned int inputIndex) const
    {
        if (m_InputBindings.size() < inputIndex + 1)
        {
            throw armnn::Exception(boost::str(boost::format("Input index out of range: %1%") % inputIndex));
        }
    }

    /// Throws armnn::Exception if outputIndex is not a valid output binding index.
    void CheckOutputIndexIsValid(unsigned int outputIndex) const
    {
        if (m_OutputBindings.size() < outputIndex + 1)
        {
            throw armnn::Exception(boost::str(boost::format("Output index out of range: %1%") % outputIndex));
        }
    }

    /// Number of elements in the given output tensor; validates the index.
    unsigned int GetOutputSize(unsigned int outputIndex = 0u) const
    {
        CheckOutputIndexIsValid(outputIndex);
        return m_OutputBindings[outputIndex].second.GetNumElements();
    }

    /// Runs one inference. Validates that each output container is large
    /// enough, then enqueues the workload and returns the wall-clock duration
    /// of EnqueueWorkload in milliseconds.
    /// @throws armnn::Exception on undersized output containers or a failed run.
    std::chrono::duration<double, std::milli> Run(
            const std::vector<TContainer>& inputContainers,
            std::vector<TContainer>& outputContainers)
    {
        for (unsigned int i = 0; i < outputContainers.size(); ++i)
        {
            const unsigned int expectedOutputDataSize = GetOutputSize(i);

            boost::apply_visitor([expectedOutputDataSize, i](auto&& value)
            {
                const unsigned int actualOutputDataSize = boost::numeric_cast<unsigned int>(value.size());
                if (actualOutputDataSize < expectedOutputDataSize)
                {
                    unsigned int outputIndex = boost::numeric_cast<unsigned int>(i);
                    throw armnn::Exception(
                            boost::str(boost::format("Not enough data for output #%1%: expected "
                            "%2% elements, got %3%") % outputIndex % expectedOutputDataSize % actualOutputDataSize));
                }
            },
            outputContainers[i]);
        }

        std::shared_ptr<armnn::IProfiler> profiler = m_Runtime->GetProfiler(m_NetworkIdentifier);
        if (profiler)
        {
            profiler->EnableProfiling(m_EnableProfiling);
        }

        // Start timer to record inference time in EnqueueWorkload (in milliseconds)
        const auto start_time = GetCurrentTime();

        armnn::Status ret = m_Runtime->EnqueueWorkload(m_NetworkIdentifier,
                                                       MakeInputTensors(inputContainers),
                                                       MakeOutputTensors(outputContainers));

        const auto end_time = GetCurrentTime();

        // if profiling is enabled print out the results
        if (profiler && profiler->IsProfilingEnabled())
        {
            profiler->Print(std::cout);
        }

        if (ret == armnn::Status::Failure)
        {
            throw armnn::Exception("IRuntime::EnqueueWorkload failed");
        }
        else
        {
            return std::chrono::duration<double, std::milli>(end_time - start_time);
        }
    }

    /// Binding info (layer id + TensorInfo) for the given input; validates the index.
    const BindingPointInfo& GetInputBindingInfo(unsigned int inputIndex = 0u) const
    {
        CheckInputIndexIsValid(inputIndex);
        return m_InputBindings[inputIndex];
    }

    /// All input binding infos, in binding order.
    const std::vector<BindingPointInfo>& GetInputBindingInfos() const
    {
        return m_InputBindings;
    }

    /// Binding info (layer id + TensorInfo) for the given output; validates the index.
    const BindingPointInfo& GetOutputBindingInfo(unsigned int outputIndex = 0u) const
    {
        CheckOutputIndexIsValid(outputIndex);
        return m_OutputBindings[outputIndex];
    }

    /// All output binding infos, in binding order.
    const std::vector<BindingPointInfo>& GetOutputBindingInfos() const
    {
        return m_OutputBindings;
    }

    /// (scale, offset) of the given output tensor; validates the index.
    QuantizationParams GetQuantizationParams(unsigned int outputIndex = 0u) const
    {
        CheckOutputIndexIsValid(outputIndex);
        return std::make_pair(m_OutputBindings[outputIndex].second.GetQuantizationScale(),
                              m_OutputBindings[outputIndex].second.GetQuantizationOffset());
    }

    /// (scale, offset) of the given input tensor; validates the index.
    QuantizationParams GetInputQuantizationParams(unsigned int inputIndex = 0u) const
    {
        CheckInputIndexIsValid(inputIndex);
        return std::make_pair(m_InputBindings[inputIndex].second.GetQuantizationScale(),
                              m_InputBindings[inputIndex].second.GetQuantizationOffset());
    }

    /// Quantization parameters of every output, in binding order.
    std::vector<QuantizationParams> GetAllQuantizationParams() const
    {
        std::vector<QuantizationParams> quantizationParams;
        for (unsigned int i = 0u; i < m_OutputBindings.size(); i++)
        {
            quantizationParams.push_back(GetQuantizationParams(i));
        }
        return quantizationParams;
    }

private:
    armnn::NetworkId m_NetworkIdentifier;              // id assigned by LoadNetwork
    std::shared_ptr<armnn::IRuntime> m_Runtime;        // shared or privately created runtime

    std::vector<InferenceModelInternal::BindingPointInfo> m_InputBindings;
    std::vector<InferenceModelInternal::BindingPointInfo> m_OutputBindings;
    bool m_EnableProfiling;

    // Forwards to the free MakeInputTensors with this model's bindings.
    template<typename TContainer>
    armnn::InputTensors MakeInputTensors(const std::vector<TContainer>& inputDataContainers)
    {
        return ::MakeInputTensors(m_InputBindings, inputDataContainers);
    }

    // Forwards to the free MakeOutputTensors with this model's bindings.
    template<typename TContainer>
    armnn::OutputTensors MakeOutputTensors(std::vector<TContainer>& outputDataContainers)
    {
        return ::MakeOutputTensors(m_OutputBindings, outputDataContainers);
    }

    // Timestamp helper used to time EnqueueWorkload in Run().
    std::chrono::high_resolution_clock::time_point GetCurrentTime()
    {
        return std::chrono::high_resolution_clock::now();
    }

    // Converts a start/end timestamp pair into milliseconds.
    // NOTE(review): not called anywhere in this class body — possibly kept for
    // future use; confirm before relying on it.
    std::chrono::duration<double, std::milli> GetTimeDuration(
            std::chrono::high_resolution_clock::time_point& start_time,
            std::chrono::high_resolution_clock::time_point& end_time)
    {
        return std::chrono::duration<double, std::milli>(end_time - start_time);
    }

};