//
// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#pragma once

#include <armnn/ArmNN.hpp>
#include <armnn/Logging.hpp>
#include <armnn/utility/Timer.hpp>
#include <armnn/BackendRegistry.hpp>
#include <armnn/utility/Assert.hpp>
#include <armnn/utility/NumericCast.hpp>

#if defined(ARMNN_SERIALIZER)
#include "armnnDeserializer/IDeserializer.hpp"
#endif
#if defined(ARMNN_TF_LITE_PARSER)
#include <armnnTfLiteParser/ITfLiteParser.hpp>
#endif
#if defined(ARMNN_ONNX_PARSER)
#include <armnnOnnxParser/IOnnxParser.hpp>
#endif

#include <Filesystem.hpp>
#include <HeapProfiling.hpp>
#include <TensorIOUtils.hpp>

#include "armnn/utility/StringUtils.hpp"
#include <boost/exception/exception.hpp>
#include <boost/exception/diagnostic_information.hpp>
#include <boost/program_options.hpp>
#include <fmt/format.h>
#include <mapbox/variant.hpp>

#include <algorithm>
#include <iterator>
#include <fstream>
#include <map>
#include <string>
#include <vector>
#include <type_traits>

namespace
{

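// Returns true when every requested backend ID is registered with the BackendRegistry.
// If an optional string reference is supplied, the IDs of any unknown backends are
// appended to it (comma separated) so the caller can report them.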
inline bool CheckRequestedBackendsAreValid(const std::vector<armnn::BackendId>& backendIds,
                                           armnn::Optional<std::string&> invalidBackendIds = armnn::EmptyOptional())
{
    if (backendIds.empty())
    {
        return false;
    }

    armnn::BackendIdSet validBackendIds = armnn::BackendRegistryInstance().GetBackendIds();

    bool allValid = true;
    for (const auto& backendId : backendIds)
    {
        if (std::find(validBackendIds.begin(), validBackendIds.end(), backendId) == validBackendIds.end())
        {
            allValid = false;
            if (invalidBackendIds)
            {
                if (!invalidBackendIds.value().empty())
                {
                    invalidBackendIds.value() += ", ";
                }
                invalidBackendIds.value() += backendId;
            }
        }
    }
    return allValid;
}

} // anonymous namespace

namespace InferenceModelInternal
{
using BindingPointInfo = armnn::BindingPointInfo;

using QuantizationParams = std::pair<float,int32_t>;

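// Parameters shared by every parser-specific CreateNetworkImpl specialisation and by
// InferenceModel itself: where to find the model, which tensors to bind, which backends
// to run on, and which optimisation switches to apply.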
struct Params
{
    std::string                     m_ModelPath;
    std::vector<std::string>        m_InputBindings;
    std::vector<armnn::TensorShape> m_InputShapes;
    std::vector<std::string>        m_OutputBindings;
    std::vector<armnn::BackendId>   m_ComputeDevices;
    std::string                     m_DynamicBackendsPath;
    size_t                          m_SubgraphId;
    bool                            m_IsModelBinary;
    bool                            m_VisualizePostOptimizationModel;
    bool                            m_EnableFp16TurboMode;
    bool                            m_EnableBf16TurboMode;
    bool                            m_PrintIntermediateLayers;
    bool                            m_ParseUnsupported;
    bool                            m_InferOutputShape;
    bool                            m_EnableFastMath;

    Params()
        : m_ComputeDevices{}
        , m_SubgraphId(0)
        , m_IsModelBinary(true)
        , m_VisualizePostOptimizationModel(false)
        , m_EnableFp16TurboMode(false)
        , m_EnableBf16TurboMode(false)
        , m_PrintIntermediateLayers(false)
        , m_ParseUnsupported(false)
        , m_InferOutputShape(false)
        , m_EnableFastMath(false)
    {}
};

} // namespace InferenceModelInternal

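// Parses a model file with the given IParser and fills in the input/output binding
// information requested in Params. This primary template targets parsers whose
// Create*File functions take explicit input shapes and requested outputs; the
// specialisations below adapt the deserializer, TfLite and ONNX parser interfaces.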
template <typename IParser>
struct CreateNetworkImpl
{
public:
    using Params = InferenceModelInternal::Params;

    static armnn::INetworkPtr Create(const Params& params,
                                     std::vector<armnn::BindingPointInfo>& inputBindings,
                                     std::vector<armnn::BindingPointInfo>& outputBindings)
    {
        const std::string& modelPath = params.m_ModelPath;

        // Create a network from a file on disk
        auto parser(IParser::Create());

        std::map<std::string, armnn::TensorShape> inputShapes;
        if (!params.m_InputShapes.empty())
        {
            const size_t numInputShapes   = params.m_InputShapes.size();
            const size_t numInputBindings = params.m_InputBindings.size();
            if (numInputShapes < numInputBindings)
            {
                throw armnn::Exception(fmt::format(
                    "Not every input has its tensor shape specified: expected={0}, got={1}",
                    numInputBindings, numInputShapes));
            }

            for (size_t i = 0; i < numInputShapes; i++)
            {
                inputShapes[params.m_InputBindings[i]] = params.m_InputShapes[i];
            }
        }

        std::vector<std::string> requestedOutputs = params.m_OutputBindings;
        armnn::INetworkPtr network{nullptr, [](armnn::INetwork *){}};

        {
            ARMNN_SCOPED_HEAP_PROFILING("Parsing");
            // Handle text and binary input differently by calling the corresponding parser function
            network = (params.m_IsModelBinary ?
                parser->CreateNetworkFromBinaryFile(modelPath.c_str(), inputShapes, requestedOutputs) :
                parser->CreateNetworkFromTextFile(modelPath.c_str(), inputShapes, requestedOutputs));
        }

        for (const std::string& inputLayerName : params.m_InputBindings)
        {
            inputBindings.push_back(parser->GetNetworkInputBindingInfo(inputLayerName));
        }

        for (const std::string& outputLayerName : params.m_OutputBindings)
        {
            outputBindings.push_back(parser->GetNetworkOutputBindingInfo(outputLayerName));
        }

        return network;
    }
};

#if defined(ARMNN_SERIALIZER)
template <>
struct CreateNetworkImpl<armnnDeserializer::IDeserializer>
{
public:
    using IParser = armnnDeserializer::IDeserializer;
    using Params  = InferenceModelInternal::Params;

    static armnn::INetworkPtr Create(const Params& params,
                                     std::vector<armnn::BindingPointInfo>& inputBindings,
                                     std::vector<armnn::BindingPointInfo>& outputBindings)
    {
        auto parser(IParser::Create());
        ARMNN_ASSERT(parser);

        armnn::INetworkPtr network{nullptr, [](armnn::INetwork *){}};

        {
            ARMNN_SCOPED_HEAP_PROFILING("Parsing");

            std::error_code errorCode;
            fs::path pathToFile(params.m_ModelPath);
            if (!fs::exists(pathToFile, errorCode))
            {
                throw armnn::FileNotFoundException(fmt::format("Cannot find the file ({0}) errorCode: {1} {2}",
                                                               params.m_ModelPath,
                                                               errorCode.message(),
                                                               CHECK_LOCATION().AsString()));
            }
            std::ifstream file(params.m_ModelPath, std::ios::binary);

            network = parser->CreateNetworkFromBinary(file);
        }

        unsigned int subgraphId = armnn::numeric_cast<unsigned int>(params.m_SubgraphId);

        for (const std::string& inputLayerName : params.m_InputBindings)
        {
            armnnDeserializer::BindingPointInfo inputBinding =
                parser->GetNetworkInputBindingInfo(subgraphId, inputLayerName);
            inputBindings.push_back(std::make_pair(inputBinding.m_BindingId, inputBinding.m_TensorInfo));
        }

        for (const std::string& outputLayerName : params.m_OutputBindings)
        {
            armnnDeserializer::BindingPointInfo outputBinding =
                parser->GetNetworkOutputBindingInfo(subgraphId, outputLayerName);
            outputBindings.push_back(std::make_pair(outputBinding.m_BindingId, outputBinding.m_TensorInfo));
        }

        return network;
    }
};
#endif

#if defined(ARMNN_TF_LITE_PARSER)
template <>
struct CreateNetworkImpl<armnnTfLiteParser::ITfLiteParser>
{
public:
    using IParser = armnnTfLiteParser::ITfLiteParser;
    using Params  = InferenceModelInternal::Params;

    static armnn::INetworkPtr Create(const Params& params,
                                     std::vector<armnn::BindingPointInfo>& inputBindings,
                                     std::vector<armnn::BindingPointInfo>& outputBindings)
    {
        const std::string& modelPath = params.m_ModelPath;

        // Create a network from a file on disk
        IParser::TfLiteParserOptions options;
        options.m_StandInLayerForUnsupported = params.m_ParseUnsupported;
        options.m_InferAndValidate           = params.m_InferOutputShape;
        auto parser(IParser::Create(options));

        armnn::INetworkPtr network{nullptr, [](armnn::INetwork *){}};

        {
            ARMNN_SCOPED_HEAP_PROFILING("Parsing");
            network = parser->CreateNetworkFromBinaryFile(modelPath.c_str());
        }

        for (const std::string& inputLayerName : params.m_InputBindings)
        {
            armnn::BindingPointInfo inputBinding =
                parser->GetNetworkInputBindingInfo(params.m_SubgraphId, inputLayerName);
            inputBindings.push_back(inputBinding);
        }

        for (const std::string& outputLayerName : params.m_OutputBindings)
        {
            armnn::BindingPointInfo outputBinding =
                parser->GetNetworkOutputBindingInfo(params.m_SubgraphId, outputLayerName);
            outputBindings.push_back(outputBinding);
        }

        return network;
    }
};
#endif

#if defined(ARMNN_ONNX_PARSER)
template <>
struct CreateNetworkImpl<armnnOnnxParser::IOnnxParser>
{
public:
    using IParser          = armnnOnnxParser::IOnnxParser;
    using Params           = InferenceModelInternal::Params;
    using BindingPointInfo = InferenceModelInternal::BindingPointInfo;

    static armnn::INetworkPtr Create(const Params& params,
                                     std::vector<BindingPointInfo>& inputBindings,
                                     std::vector<BindingPointInfo>& outputBindings)
    {
        const std::string& modelPath = params.m_ModelPath;

        // Create a network from a file on disk
        auto parser(IParser::Create());

        armnn::INetworkPtr network{nullptr, [](armnn::INetwork *){}};

        {
            ARMNN_SCOPED_HEAP_PROFILING("Parsing");
            network = (params.m_IsModelBinary ?
                parser->CreateNetworkFromBinaryFile(modelPath.c_str()) :
                parser->CreateNetworkFromTextFile(modelPath.c_str()));
        }

        for (const std::string& inputLayerName : params.m_InputBindings)
        {
            BindingPointInfo inputBinding = parser->GetNetworkInputBindingInfo(inputLayerName);
            inputBindings.push_back(inputBinding);
        }

        for (const std::string& outputLayerName : params.m_OutputBindings)
        {
            BindingPointInfo outputBinding = parser->GetNetworkOutputBindingInfo(outputLayerName);
            outputBindings.push_back(outputBinding);
        }

        return network;
    }
};
#endif

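// InferenceModel ties the pieces above together: it parses the model with IParser,
// optimises it for the requested backends, loads it into an armnn::IRuntime and exposes
// Run() to execute a single inference. The sketch below is illustrative only (it is not
// compiled as part of this header); the model path and binding names are hypothetical,
// and it assumes ARMNN_TF_LITE_PARSER is defined and the model has one input and one output.
//
//     using TfLiteModel = InferenceModel<armnnTfLiteParser::ITfLiteParser, float>;
//
//     TfLiteModel::Params params;
//     params.m_ModelPath      = "model.tflite";        // hypothetical path
//     params.m_InputBindings  = { "input" };           // hypothetical binding name
//     params.m_OutputBindings = { "output" };          // hypothetical binding name
//     params.m_ComputeDevices = { "CpuAcc", "CpuRef" };
//
//     TfLiteModel model(params, /*enableProfiling=*/false, /*dynamicBackendsPath=*/"");
//
//     std::vector<TfLiteModel::TContainer> inputs  = { std::vector<float>(model.GetInputSize()) };
//     std::vector<TfLiteModel::TContainer> outputs = { std::vector<float>(model.GetOutputSize()) };
//
//     auto inferenceTimeMs = model.Run(inputs, outputs); // duration of EnqueueWorkload in ms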
template <typename IParser, typename TDataType>
class InferenceModel
{
public:
    using DataType           = TDataType;
    using Params             = InferenceModelInternal::Params;
    using QuantizationParams = InferenceModelInternal::QuantizationParams;
    using TContainer         = mapbox::util::variant<std::vector<float>, std::vector<int>, std::vector<unsigned char>>;

    struct CommandLineOptions
    {
        std::string m_ModelDir;
        std::vector<std::string> m_ComputeDevices;
        std::string m_DynamicBackendsPath;
        bool m_VisualizePostOptimizationModel;
        bool m_EnableFp16TurboMode;
        bool m_EnableBf16TurboMode;
        std::string m_Labels;

        std::vector<armnn::BackendId> GetComputeDevicesAsBackendIds()
        {
            std::vector<armnn::BackendId> backendIds;
            std::copy(m_ComputeDevices.begin(), m_ComputeDevices.end(), std::back_inserter(backendIds));
            return backendIds;
        }
    };

    static void AddCommandLineOptions(boost::program_options::options_description& desc, CommandLineOptions& options)
    {
        namespace po = boost::program_options;

        const std::vector<std::string> defaultComputes = { "CpuAcc", "CpuRef" };

        const std::string backendsMessage = "Which device to run layers on by default. Possible choices: "
                                          + armnn::BackendRegistryInstance().GetBackendIdsAsString();

        desc.add_options()
            ("model-dir,m", po::value<std::string>(&options.m_ModelDir)->required(),
                "Path to directory containing model files (.caffemodel/.prototxt/.tflite)")
            ("compute,c", po::value<std::vector<std::string>>(&options.m_ComputeDevices)->
                default_value(defaultComputes, armnn::stringUtils::StringConcat(defaultComputes, ", "))->
                multitoken(), backendsMessage.c_str())
            ("dynamic-backends-path,b", po::value(&options.m_DynamicBackendsPath),
                "Path where to load any available dynamic backend from. "
                "If left empty (the default), dynamic backends will not be used.")
            ("labels,l", po::value<std::string>(&options.m_Labels),
                "Text file containing one image filename - correct label pair per line, "
                "used to test the accuracy of the network.")
            ("visualize-optimized-model,v",
                po::value<bool>(&options.m_VisualizePostOptimizationModel)->default_value(false),
                "Produce a dot file useful for visualizing the graph post optimization. "
                "The file will have the same name as the model with the .dot extension.")
            ("fp16-turbo-mode", po::value<bool>(&options.m_EnableFp16TurboMode)->default_value(false),
                "If this option is enabled, FP32 layers, weights and biases will be converted "
                "to FP16 where the backend supports it.")
            ("bf16-turbo-mode", po::value<bool>(&options.m_EnableBf16TurboMode)->default_value(false),
                "If this option is enabled, FP32 layers, weights and biases will be converted "
                "to BF16 where the backend supports it.");
    }

    InferenceModel(const Params& params,
                   bool enableProfiling,
                   const std::string& dynamicBackendsPath,
                   const std::shared_ptr<armnn::IRuntime>& runtime = nullptr)
        : m_EnableProfiling(enableProfiling)
        , m_DynamicBackendsPath(dynamicBackendsPath)
    {
        if (runtime)
        {
            m_Runtime = runtime;
        }
        else
        {
            armnn::IRuntime::CreationOptions options;
            options.m_EnableGpuProfiling  = m_EnableProfiling;
            options.m_DynamicBackendsPath = m_DynamicBackendsPath;
            m_Runtime = std::move(armnn::IRuntime::Create(options));
        }

        std::string invalidBackends;
        if (!CheckRequestedBackendsAreValid(params.m_ComputeDevices, armnn::Optional<std::string&>(invalidBackends)))
        {
            throw armnn::Exception("Some backend IDs are invalid: " + invalidBackends);
        }

        const auto parsing_start_time = armnn::GetTimeNow();
        armnn::INetworkPtr network = CreateNetworkImpl<IParser>::Create(params, m_InputBindings, m_OutputBindings);

        ARMNN_LOG(info) << "Network parsing time: " << std::setprecision(2)
                        << std::fixed << armnn::GetTimeDuration(parsing_start_time).count() << " ms\n";

        armnn::IOptimizedNetworkPtr optNet{nullptr, [](armnn::IOptimizedNetwork*){}};
        {
            ARMNN_SCOPED_HEAP_PROFILING("Optimizing");

            armnn::OptimizerOptions options;
            options.m_ReduceFp32ToFp16 = params.m_EnableFp16TurboMode;
            options.m_ReduceFp32ToBf16 = params.m_EnableBf16TurboMode;
            options.m_Debug            = params.m_PrintIntermediateLayers;

            armnn::BackendOptions gpuAcc("GpuAcc",
            {
                { "FastMathEnabled", params.m_EnableFastMath }
            });
            armnn::BackendOptions cpuAcc("CpuAcc",
            {
                { "FastMathEnabled", params.m_EnableFastMath }
            });
            options.m_ModelOptions.push_back(gpuAcc);
            options.m_ModelOptions.push_back(cpuAcc);

            const auto optimization_start_time = armnn::GetTimeNow();
            optNet = armnn::Optimize(*network, params.m_ComputeDevices, m_Runtime->GetDeviceSpec(), options);

            ARMNN_LOG(info) << "Optimization time: " << std::setprecision(2)
                            << std::fixed << armnn::GetTimeDuration(optimization_start_time).count() << " ms\n";

            if (!optNet)
            {
                throw armnn::Exception("Optimize returned nullptr");
            }
        }

        if (params.m_VisualizePostOptimizationModel)
        {
            fs::path filename = params.m_ModelPath;
            filename.replace_extension("dot");
            std::fstream file(filename.c_str(), std::ios_base::out);
            optNet->SerializeToDot(file);
        }

        armnn::Status ret;
        {
            ARMNN_SCOPED_HEAP_PROFILING("LoadNetwork");
            ret = m_Runtime->LoadNetwork(m_NetworkIdentifier, std::move(optNet));
        }

        if (ret == armnn::Status::Failure)
        {
            throw armnn::Exception("IRuntime::LoadNetwork failed");
        }
    }

    void CheckInputIndexIsValid(unsigned int inputIndex) const
    {
        if (m_InputBindings.size() < inputIndex + 1)
        {
            throw armnn::Exception(fmt::format("Input index out of range: {}", inputIndex));
        }
    }

    void CheckOutputIndexIsValid(unsigned int outputIndex) const
    {
        if (m_OutputBindings.size() < outputIndex + 1)
        {
            throw armnn::Exception(fmt::format("Output index out of range: {}", outputIndex));
        }
    }

    unsigned int GetInputSize(unsigned int inputIndex = 0u) const
    {
        CheckInputIndexIsValid(inputIndex);
        return m_InputBindings[inputIndex].second.GetNumElements();
    }

    unsigned int GetOutputSize(unsigned int outputIndex = 0u) const
    {
        CheckOutputIndexIsValid(outputIndex);
        return m_OutputBindings[outputIndex].second.GetNumElements();
    }

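    // Runs a single inference. Output containers are checked up front so that each one can
    // hold the number of elements the network will produce; the returned duration covers
    // only the EnqueueWorkload call, in milliseconds.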
    std::chrono::duration<double, std::milli> Run(
            const std::vector<TContainer>& inputContainers,
            std::vector<TContainer>& outputContainers)
    {
        for (unsigned int i = 0; i < outputContainers.size(); ++i)
        {
            const unsigned int expectedOutputDataSize = GetOutputSize(i);

            mapbox::util::apply_visitor([expectedOutputDataSize, i](auto&& value)
            {
                const unsigned int actualOutputDataSize = armnn::numeric_cast<unsigned int>(value.size());
                if (actualOutputDataSize < expectedOutputDataSize)
                {
                    unsigned int outputIndex = i;
                    throw armnn::Exception(
                            fmt::format("Not enough data for output #{0}: expected "
                                        "{1} elements, got {2}", outputIndex, expectedOutputDataSize, actualOutputDataSize));
                }
            },
            outputContainers[i]);
        }

        std::shared_ptr<armnn::IProfiler> profiler = m_Runtime->GetProfiler(m_NetworkIdentifier);
        if (profiler)
        {
            profiler->EnableProfiling(m_EnableProfiling);
        }

        // Start timer to record inference time in EnqueueWorkload (in milliseconds)
        const auto start_time = armnn::GetTimeNow();

        armnn::Status ret = m_Runtime->EnqueueWorkload(m_NetworkIdentifier,
                                                       MakeInputTensors(inputContainers),
                                                       MakeOutputTensors(outputContainers));

        const auto duration = armnn::GetTimeDuration(start_time);

        // if profiling is enabled print out the results
        if (profiler && profiler->IsProfilingEnabled())
        {
            profiler->Print(std::cout);
        }

        if (ret == armnn::Status::Failure)
        {
            throw armnn::Exception("IRuntime::EnqueueWorkload failed");
        }
        else
        {
            return duration;
        }
    }

    const armnn::BindingPointInfo& GetInputBindingInfo(unsigned int inputIndex = 0u) const
    {
        CheckInputIndexIsValid(inputIndex);
        return m_InputBindings[inputIndex];
    }

    const std::vector<armnn::BindingPointInfo>& GetInputBindingInfos() const
    {
        return m_InputBindings;
    }

    const armnn::BindingPointInfo& GetOutputBindingInfo(unsigned int outputIndex = 0u) const
    {
        CheckOutputIndexIsValid(outputIndex);
        return m_OutputBindings[outputIndex];
    }

    const std::vector<armnn::BindingPointInfo>& GetOutputBindingInfos() const
    {
        return m_OutputBindings;
    }

    QuantizationParams GetQuantizationParams(unsigned int outputIndex = 0u) const
    {
        CheckOutputIndexIsValid(outputIndex);
        return std::make_pair(m_OutputBindings[outputIndex].second.GetQuantizationScale(),
                              m_OutputBindings[outputIndex].second.GetQuantizationOffset());
    }

    QuantizationParams GetInputQuantizationParams(unsigned int inputIndex = 0u) const
    {
        CheckInputIndexIsValid(inputIndex);
        return std::make_pair(m_InputBindings[inputIndex].second.GetQuantizationScale(),
                              m_InputBindings[inputIndex].second.GetQuantizationOffset());
    }

    std::vector<QuantizationParams> GetAllQuantizationParams() const
    {
        std::vector<QuantizationParams> quantizationParams;
        for (unsigned int i = 0u; i < m_OutputBindings.size(); i++)
        {
            quantizationParams.push_back(GetQuantizationParams(i));
        }
        return quantizationParams;
    }

private:
    armnn::NetworkId m_NetworkIdentifier;
    std::shared_ptr<armnn::IRuntime> m_Runtime;

    std::vector<armnn::BindingPointInfo> m_InputBindings;
    std::vector<armnn::BindingPointInfo> m_OutputBindings;
    bool m_EnableProfiling;
    std::string m_DynamicBackendsPath;

    template<typename TContainer>
    armnn::InputTensors MakeInputTensors(const std::vector<TContainer>& inputDataContainers)
    {
        return armnnUtils::MakeInputTensors(m_InputBindings, inputDataContainers);
    }

    template<typename TContainer>
    armnn::OutputTensors MakeOutputTensors(std::vector<TContainer>& outputDataContainers)
    {
        return armnnUtils::MakeOutputTensors(m_OutputBindings, outputDataContainers);
    }
};