//
// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#pragma once

#include <armnn/ArmNN.hpp>
#include <armnn/Logging.hpp>
#include <armnn/utility/Timer.hpp>
#include <armnn/BackendRegistry.hpp>
#include <armnn/utility/Assert.hpp>
#include <armnn/utility/NumericCast.hpp>
#include <common/include/ProfilingGuid.hpp>

#if defined(ARMNN_SERIALIZER)
#include "armnnDeserializer/IDeserializer.hpp"
#endif
#if defined(ARMNN_TF_LITE_PARSER)
#include <armnnTfLiteParser/ITfLiteParser.hpp>
#endif
#if defined(ARMNN_ONNX_PARSER)
#include <armnnOnnxParser/IOnnxParser.hpp>
#endif

#include <Filesystem.hpp>
#include <HeapProfiling.hpp>
#include <TensorIOUtils.hpp>

#include "armnn/utility/StringUtils.hpp"
#include <cxxopts/cxxopts.hpp>
#include "CxxoptsUtils.hpp"
#include <fmt/format.h>
#include <mapbox/variant.hpp>

#include <algorithm>
#include <iterator>
#include <fstream>
#include <map>
#include <string>
#include <vector>
#include <type_traits>

namespace
{

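// Returns true only if every requested backend ID is registered with the Arm NN backend registry.
// If the optional invalidBackendIds string is supplied, any unknown IDs are appended to it
// (comma separated) so the caller can report them.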
inline bool CheckRequestedBackendsAreValid(const std::vector<armnn::BackendId>& backendIds,
                                           armnn::Optional<std::string&> invalidBackendIds = armnn::EmptyOptional())
{
    if (backendIds.empty())
    {
        return false;
    }

    armnn::BackendIdSet validBackendIds = armnn::BackendRegistryInstance().GetBackendIds();

    bool allValid = true;
    for (const auto& backendId : backendIds)
    {
        if (std::find(validBackendIds.begin(), validBackendIds.end(), backendId) == validBackendIds.end())
        {
            allValid = false;
            if (invalidBackendIds)
            {
                if (!invalidBackendIds.value().empty())
                {
                    invalidBackendIds.value() += ", ";
                }
                invalidBackendIds.value() += backendId;
            }
        }
    }
    return allValid;
}

} // anonymous namespace

namespace InferenceModelInternal
{
using BindingPointInfo = armnn::BindingPointInfo;

using QuantizationParams = std::pair<float,int32_t>;

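// Describes the network to load and how to run it: model file location, input/output bindings,
// target backends, and the optimization, caching and threading options to apply.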
struct Params
{
    std::string                     m_ModelPath;
    std::vector<std::string>        m_InputBindings;
    std::vector<armnn::TensorShape> m_InputShapes;
    std::vector<std::string>        m_OutputBindings;
    std::vector<armnn::BackendId>   m_ComputeDevices;
    std::string                     m_DynamicBackendsPath;
    size_t                          m_SubgraphId;
    bool                            m_IsModelBinary;
    bool                            m_VisualizePostOptimizationModel;
    bool                            m_EnableFp16TurboMode;
    bool                            m_EnableBf16TurboMode;
    bool                            m_PrintIntermediateLayers;
    bool                            m_ParseUnsupported;
    bool                            m_InferOutputShape;
    bool                            m_EnableFastMath;
    bool                            m_SaveCachedNetwork;
    std::string                     m_CachedNetworkFilePath;
    unsigned int                    m_NumberOfThreads;
    std::string                     m_MLGOTuningFilePath;
    bool                            m_AsyncEnabled;
    size_t                          m_ThreadPoolSize;

    Params()
        : m_ComputeDevices{}
        , m_SubgraphId(0)
        , m_IsModelBinary(true)
        , m_VisualizePostOptimizationModel(false)
        , m_EnableFp16TurboMode(false)
        , m_EnableBf16TurboMode(false)
        , m_PrintIntermediateLayers(false)
        , m_ParseUnsupported(false)
        , m_InferOutputShape(false)
        , m_EnableFastMath(false)
        , m_SaveCachedNetwork(false)
        , m_CachedNetworkFilePath("")
        , m_NumberOfThreads(0)
        , m_MLGOTuningFilePath("")
        , m_AsyncEnabled(false)
        , m_ThreadPoolSize(0)
    {}
};

} // namespace InferenceModelInternal

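// Generic parser adapter: creates an armnn::INetwork from a model file using the given IParser type
// and fills in the input/output binding information. The specializations below cover parsers whose
// interfaces differ from this default.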
template <typename IParser>
struct CreateNetworkImpl
{
public:
    using Params = InferenceModelInternal::Params;

    static armnn::INetworkPtr Create(const Params& params,
                                     std::vector<armnn::BindingPointInfo>& inputBindings,
                                     std::vector<armnn::BindingPointInfo>& outputBindings)
    {
        const std::string& modelPath = params.m_ModelPath;

        // Create a network from a file on disk
        auto parser(IParser::Create());

        std::map<std::string, armnn::TensorShape> inputShapes;
        if (!params.m_InputShapes.empty())
        {
            const size_t numInputShapes = params.m_InputShapes.size();
            const size_t numInputBindings = params.m_InputBindings.size();
            if (numInputShapes < numInputBindings)
            {
                throw armnn::Exception(fmt::format(
                    "Not every input has its tensor shape specified: expected={0}, got={1}",
                    numInputBindings, numInputShapes));
            }

            for (size_t i = 0; i < numInputShapes; i++)
            {
                inputShapes[params.m_InputBindings[i]] = params.m_InputShapes[i];
            }
        }

        std::vector<std::string> requestedOutputs = params.m_OutputBindings;
        armnn::INetworkPtr network{nullptr, [](armnn::INetwork *){}};

        {
            ARMNN_SCOPED_HEAP_PROFILING("Parsing");
            // Handle text and binary input differently by calling the corresponding parser function
            network = (params.m_IsModelBinary ?
                parser->CreateNetworkFromBinaryFile(modelPath.c_str(), inputShapes, requestedOutputs) :
                parser->CreateNetworkFromTextFile(modelPath.c_str(), inputShapes, requestedOutputs));
        }

        for (const std::string& inputLayerName : params.m_InputBindings)
        {
            inputBindings.push_back(parser->GetNetworkInputBindingInfo(inputLayerName));
        }

        for (const std::string& outputLayerName : params.m_OutputBindings)
        {
            outputBindings.push_back(parser->GetNetworkOutputBindingInfo(outputLayerName));
        }

        return network;
    }
};

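// Specialization for the Arm NN deserializer, which reads serialized .armnn models from a binary
// stream and looks bindings up by subgraph ID.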
#if defined(ARMNN_SERIALIZER)
template <>
struct CreateNetworkImpl<armnnDeserializer::IDeserializer>
{
public:
    using IParser = armnnDeserializer::IDeserializer;
    using Params = InferenceModelInternal::Params;

    static armnn::INetworkPtr Create(const Params& params,
                                     std::vector<armnn::BindingPointInfo>& inputBindings,
                                     std::vector<armnn::BindingPointInfo>& outputBindings)
    {
        auto parser(IParser::Create());
        ARMNN_ASSERT(parser);

        armnn::INetworkPtr network{nullptr, [](armnn::INetwork *){}};

        {
            ARMNN_SCOPED_HEAP_PROFILING("Parsing");

            std::error_code errorCode;
            fs::path pathToFile(params.m_ModelPath);
            if (!fs::exists(pathToFile, errorCode))
            {
                throw armnn::FileNotFoundException(fmt::format("Cannot find the file ({0}) errorCode: {1} {2}",
                                                               params.m_ModelPath,
                                                               errorCode.message(),
                                                               CHECK_LOCATION().AsString()));
            }
            std::ifstream file(params.m_ModelPath, std::ios::binary);

            network = parser->CreateNetworkFromBinary(file);
        }

        unsigned int subgraphId = armnn::numeric_cast<unsigned int>(params.m_SubgraphId);

        for (const std::string& inputLayerName : params.m_InputBindings)
        {
            armnnDeserializer::BindingPointInfo inputBinding =
                parser->GetNetworkInputBindingInfo(subgraphId, inputLayerName);
            inputBindings.push_back(std::make_pair(inputBinding.m_BindingId, inputBinding.m_TensorInfo));
        }

        for (const std::string& outputLayerName : params.m_OutputBindings)
        {
            armnnDeserializer::BindingPointInfo outputBinding =
                parser->GetNetworkOutputBindingInfo(subgraphId, outputLayerName);
            outputBindings.push_back(std::make_pair(outputBinding.m_BindingId, outputBinding.m_TensorInfo));
        }

        return network;
    }
};
#endif

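// Specialization for the TfLite parser, which reads binary .tflite files only and can optionally
// substitute stand-in layers for unsupported operators and infer missing output shapes.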
#if defined(ARMNN_TF_LITE_PARSER)
template <>
struct CreateNetworkImpl<armnnTfLiteParser::ITfLiteParser>
{
public:
    using IParser = armnnTfLiteParser::ITfLiteParser;
    using Params = InferenceModelInternal::Params;

    static armnn::INetworkPtr Create(const Params& params,
                                     std::vector<armnn::BindingPointInfo>& inputBindings,
                                     std::vector<armnn::BindingPointInfo>& outputBindings)
    {
        const std::string& modelPath = params.m_ModelPath;

        // Create a network from a file on disk
        IParser::TfLiteParserOptions options;
        options.m_StandInLayerForUnsupported = params.m_ParseUnsupported;
        options.m_InferAndValidate = params.m_InferOutputShape;
        auto parser(IParser::Create(options));

        armnn::INetworkPtr network{nullptr, [](armnn::INetwork *){}};

        {
            ARMNN_SCOPED_HEAP_PROFILING("Parsing");
            network = parser->CreateNetworkFromBinaryFile(modelPath.c_str());
        }

        for (const std::string& inputLayerName : params.m_InputBindings)
        {
            armnn::BindingPointInfo inputBinding =
                parser->GetNetworkInputBindingInfo(params.m_SubgraphId, inputLayerName);
            inputBindings.push_back(inputBinding);
        }

        for (const std::string& outputLayerName : params.m_OutputBindings)
        {
            armnn::BindingPointInfo outputBinding =
                parser->GetNetworkOutputBindingInfo(params.m_SubgraphId, outputLayerName);
            outputBindings.push_back(outputBinding);
        }

        return network;
    }
};
#endif

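// Specialization for the ONNX parser, which accepts text or binary models and looks bindings up
// by layer name alone.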
#if defined(ARMNN_ONNX_PARSER)
template <>
struct CreateNetworkImpl<armnnOnnxParser::IOnnxParser>
{
public:
    using IParser = armnnOnnxParser::IOnnxParser;
    using Params = InferenceModelInternal::Params;
    using BindingPointInfo = InferenceModelInternal::BindingPointInfo;

    static armnn::INetworkPtr Create(const Params& params,
                                     std::vector<BindingPointInfo>& inputBindings,
                                     std::vector<BindingPointInfo>& outputBindings)
    {
        const std::string& modelPath = params.m_ModelPath;

        // Create a network from a file on disk
        auto parser(IParser::Create());

        armnn::INetworkPtr network{nullptr, [](armnn::INetwork *){}};

        {
            ARMNN_SCOPED_HEAP_PROFILING("Parsing");
            network = (params.m_IsModelBinary ?
                parser->CreateNetworkFromBinaryFile(modelPath.c_str()) :
                parser->CreateNetworkFromTextFile(modelPath.c_str()));
        }

        for (const std::string& inputLayerName : params.m_InputBindings)
        {
            BindingPointInfo inputBinding = parser->GetNetworkInputBindingInfo(inputLayerName);
            inputBindings.push_back(inputBinding);
        }

        for (const std::string& outputLayerName : params.m_OutputBindings)
        {
            BindingPointInfo outputBinding = parser->GetNetworkOutputBindingInfo(outputLayerName);
            outputBindings.push_back(outputBinding);
        }

        return network;
    }
};
#endif

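// Wraps an Arm NN runtime and a single loaded network: the constructor parses, optimizes and loads
// the model, after which Run/RunAsync perform inference and the accessors expose binding and
// quantization information.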
template <typename IParser, typename TDataType>
class InferenceModel
{
public:
    using DataType = TDataType;
    using Params = InferenceModelInternal::Params;
    using QuantizationParams = InferenceModelInternal::QuantizationParams;
    using TContainer
        = mapbox::util::variant<std::vector<float>, std::vector<int>, std::vector<unsigned char>, std::vector<int8_t>>;

    struct CommandLineOptions
    {
        std::string m_ModelDir;
        std::vector<std::string> m_ComputeDevices;
        std::string m_DynamicBackendsPath;
        bool m_VisualizePostOptimizationModel;
        bool m_EnableFp16TurboMode;
        bool m_EnableBf16TurboMode;
        std::string m_Labels;

        std::vector<armnn::BackendId> GetComputeDevicesAsBackendIds()
        {
            std::vector<armnn::BackendId> backendIds;
            std::copy(m_ComputeDevices.begin(), m_ComputeDevices.end(), std::back_inserter(backendIds));
            return backendIds;
        }
    };

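    // Registers the model-related command line options on the given cxxopts::Options instance and
    // appends the options that must be present ("model-dir") to the required list.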
    static void AddCommandLineOptions(cxxopts::Options& options,
                                      CommandLineOptions& cLineOptions, std::vector<std::string>& required)
    {
        const std::vector<std::string> defaultComputes = { "CpuAcc", "CpuRef" };

        const std::string backendsMessage = "Which device to run layers on by default. Possible choices: "
                                          + armnn::BackendRegistryInstance().GetBackendIdsAsString();

        options
            .allow_unrecognised_options()
            .add_options()
                ("m,model-dir", "Path to directory containing model files (.prototxt/.tflite)",
                 cxxopts::value<std::string>(cLineOptions.m_ModelDir))
                ("c,compute", backendsMessage.c_str(),
                 cxxopts::value<std::vector<std::string>>(cLineOptions.m_ComputeDevices)->default_value("CpuRef"))
                ("b,dynamic-backends-path",
                 "Path from which to load any available dynamic backends. "
                 "If left empty (the default), dynamic backends will not be used.",
                 cxxopts::value(cLineOptions.m_DynamicBackendsPath))
                ("l,labels",
                 "Text file containing one image filename - correct label pair per line, "
                 "used to test the accuracy of the network.", cxxopts::value<std::string>(cLineOptions.m_Labels))
                ("v,visualize-optimized-model",
                 "Produce a dot file useful for visualizing the graph post optimization. "
                 "The file will have the same name as the model, with the .dot extension.",
                 cxxopts::value<bool>(cLineOptions.m_VisualizePostOptimizationModel)->default_value("false"))
                ("fp16-turbo-mode",
                 "If this option is enabled, FP32 layers, weights and biases will be converted "
                 "to FP16 where the backend supports it.",
                 cxxopts::value<bool>(cLineOptions.m_EnableFp16TurboMode)->default_value("false"))
                ("bf16-turbo-mode",
                 "If this option is enabled, FP32 layers, weights and biases will be converted "
                 "to BF16 where the backend supports it.",
                 cxxopts::value<bool>(cLineOptions.m_EnableBf16TurboMode)->default_value("false"));

        required.emplace_back("model-dir");
    }

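    // Creates or reuses a runtime, validates the requested backends, then parses, optimizes and
    // loads the network, logging the time spent in each stage. Throws if a backend ID is unknown,
    // if optimization returns nullptr or if loading fails.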
    InferenceModel(const Params& params,
                   bool enableProfiling,
                   const std::string& dynamicBackendsPath,
                   const std::shared_ptr<armnn::IRuntime>& runtime = nullptr)
        : m_EnableProfiling(enableProfiling)
        , m_DynamicBackendsPath(dynamicBackendsPath)
    {
        if (runtime)
        {
            m_Runtime = runtime;
        }
        else
        {
            armnn::IRuntime::CreationOptions options;
            options.m_EnableGpuProfiling = m_EnableProfiling;
            options.m_DynamicBackendsPath = m_DynamicBackendsPath;
            m_Runtime = std::move(armnn::IRuntime::Create(options));
        }

        std::string invalidBackends;
        if (!CheckRequestedBackendsAreValid(params.m_ComputeDevices, armnn::Optional<std::string&>(invalidBackends)))
        {
            throw armnn::Exception("Some backend IDs are invalid: " + invalidBackends);
        }

        armnn::IOptimizedNetworkPtr optNet{nullptr, [](armnn::IOptimizedNetwork*){}};
        {
            const auto parsing_start_time = armnn::GetTimeNow();
            armnn::INetworkPtr network = CreateNetworkImpl<IParser>::Create(params, m_InputBindings, m_OutputBindings);

            ARMNN_LOG(info) << "Network parsing time: " << std::setprecision(2)
                            << std::fixed << armnn::GetTimeDuration(parsing_start_time).count() << " ms\n";

            ARMNN_SCOPED_HEAP_PROFILING("Optimizing");

            armnn::OptimizerOptions options;
            options.m_ReduceFp32ToFp16 = params.m_EnableFp16TurboMode;
            options.m_ReduceFp32ToBf16 = params.m_EnableBf16TurboMode;
            options.m_Debug = params.m_PrintIntermediateLayers;

            armnn::BackendOptions gpuAcc("GpuAcc",
            {
                { "FastMathEnabled", params.m_EnableFastMath },
                { "SaveCachedNetwork", params.m_SaveCachedNetwork },
                { "CachedNetworkFilePath", params.m_CachedNetworkFilePath },
                { "MLGOTuningFilePath", params.m_MLGOTuningFilePath }
            });

            armnn::BackendOptions cpuAcc("CpuAcc",
            {
                { "FastMathEnabled", params.m_EnableFastMath },
                { "NumberOfThreads", params.m_NumberOfThreads }
            });
            options.m_ModelOptions.push_back(gpuAcc);
            options.m_ModelOptions.push_back(cpuAcc);

            const auto optimization_start_time = armnn::GetTimeNow();
            optNet = armnn::Optimize(*network, params.m_ComputeDevices, m_Runtime->GetDeviceSpec(), options);

            ARMNN_LOG(info) << "Optimization time: " << std::setprecision(2)
                            << std::fixed << armnn::GetTimeDuration(optimization_start_time).count() << " ms\n";

            if (!optNet)
            {
                throw armnn::Exception("Optimize returned nullptr");
            }
        }

        if (params.m_VisualizePostOptimizationModel)
        {
            fs::path filename = params.m_ModelPath;
            filename.replace_extension("dot");
            std::fstream file(filename.c_str(), std::ios_base::out);
            optNet->SerializeToDot(file);
        }

        armnn::Status ret;
        {
            ARMNN_SCOPED_HEAP_PROFILING("LoadNetwork");

            const auto loading_start_time = armnn::GetTimeNow();
            armnn::INetworkProperties networkProperties(params.m_AsyncEnabled,
                                                        armnn::MemorySource::Undefined,
                                                        armnn::MemorySource::Undefined,
                                                        params.m_ThreadPoolSize);
            std::string errorMessage;
            ret = m_Runtime->LoadNetwork(m_NetworkIdentifier, std::move(optNet), errorMessage, networkProperties);

            ARMNN_LOG(info) << "Network loading time: " << std::setprecision(2)
                            << std::fixed << armnn::GetTimeDuration(loading_start_time).count() << " ms\n";
        }

        if (ret == armnn::Status::Failure)
        {
            throw armnn::Exception("IRuntime::LoadNetwork failed");
        }
    }

    void CheckInputIndexIsValid(unsigned int inputIndex) const
    {
        if (m_InputBindings.size() < inputIndex + 1)
        {
            throw armnn::Exception(fmt::format("Input index out of range: {}", inputIndex));
        }
    }

    void CheckOutputIndexIsValid(unsigned int outputIndex) const
    {
        if (m_OutputBindings.size() < outputIndex + 1)
        {
            throw armnn::Exception(fmt::format("Output index out of range: {}", outputIndex));
        }
    }

    unsigned int GetInputSize(unsigned int inputIndex = 0u) const
    {
        CheckInputIndexIsValid(inputIndex);
        return m_InputBindings[inputIndex].second.GetNumElements();
    }

    unsigned int GetOutputSize(unsigned int outputIndex = 0u) const
    {
        CheckOutputIndexIsValid(outputIndex);
        return m_OutputBindings[outputIndex].second.GetNumElements();
    }

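    // Runs a single synchronous inference. Each output container must already hold at least the
    // expected number of elements, otherwise an exception is thrown. Returns the time spent in
    // EnqueueWorkload, in milliseconds.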
    std::chrono::duration<double, std::milli> Run(
            const std::vector<TContainer>& inputContainers,
            std::vector<TContainer>& outputContainers)
    {
        for (unsigned int i = 0; i < outputContainers.size(); ++i)
        {
            const unsigned int expectedOutputDataSize = GetOutputSize(i);

            mapbox::util::apply_visitor([expectedOutputDataSize, i](auto&& value)
            {
                const unsigned int actualOutputDataSize = armnn::numeric_cast<unsigned int>(value.size());
                if (actualOutputDataSize < expectedOutputDataSize)
                {
                    unsigned int outputIndex = i;
                    throw armnn::Exception(
                            fmt::format("Not enough data for output #{0}: expected "
                            "{1} elements, got {2}", outputIndex, expectedOutputDataSize, actualOutputDataSize));
                }
            },
            outputContainers[i]);
        }

        std::shared_ptr<armnn::IProfiler> profiler = m_Runtime->GetProfiler(m_NetworkIdentifier);
        if (profiler)
        {
            profiler->EnableProfiling(m_EnableProfiling);
        }

        // Start timer to record inference time in EnqueueWorkload (in milliseconds)
        const auto start_time = armnn::GetTimeNow();

        armnn::Status ret = m_Runtime->EnqueueWorkload(m_NetworkIdentifier,
                                                       MakeInputTensors(inputContainers),
                                                       MakeOutputTensors(outputContainers));
        const auto duration = armnn::GetTimeDuration(start_time);

        // if profiling is enabled print out the results
        if (profiler && profiler->IsProfilingEnabled())
        {
            profiler->Print(std::cout);
        }

        if (ret == armnn::Status::Failure)
        {
            throw armnn::Exception("IRuntime::EnqueueWorkload failed");
        }
        else
        {
            return duration;
        }
    }

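    // Runs a single inference through IRuntime::Execute using a pre-allocated working memory
    // handle. Returns the handle's inference ID together with the execution time in milliseconds.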
    std::tuple<armnn::profiling::ProfilingGuid, std::chrono::duration<double, std::milli>> RunAsync(
        armnn::experimental::IWorkingMemHandle& workingMemHandleRef,
        const std::vector<TContainer>& inputContainers,
        std::vector<TContainer>& outputContainers)
    {
        for (unsigned int i = 0; i < outputContainers.size(); ++i)
        {
            const unsigned int expectedOutputDataSize = GetOutputSize(i);

            mapbox::util::apply_visitor([expectedOutputDataSize, i](auto&& value)
            {
                const unsigned int actualOutputDataSize = armnn::numeric_cast<unsigned int>(value.size());
                if (actualOutputDataSize < expectedOutputDataSize)
                {
                    unsigned int outputIndex = i;
                    throw armnn::Exception(
                            fmt::format("Not enough data for output #{0}: expected "
                            "{1} elements, got {2}", outputIndex, expectedOutputDataSize, actualOutputDataSize));
                }
            },
            outputContainers[i]);
        }

        std::shared_ptr<armnn::IProfiler> profiler = m_Runtime->GetProfiler(m_NetworkIdentifier);
        if (profiler)
        {
            profiler->EnableProfiling(m_EnableProfiling);
        }

        // Start timer to record inference time in Execute (in milliseconds)
        const auto start_time = armnn::GetTimeNow();

        armnn::Status ret = m_Runtime->Execute(workingMemHandleRef,
                                               MakeInputTensors(inputContainers),
                                               MakeOutputTensors(outputContainers));
        auto inferenceID = workingMemHandleRef.GetInferenceId();

        const auto duration = armnn::GetTimeDuration(start_time);

        // if profiling is enabled print out the results
        if (profiler && profiler->IsProfilingEnabled())
        {
            profiler->Print(std::cout);
        }

        if (ret == armnn::Status::Failure)
        {
            throw armnn::Exception(
                fmt::format("IRuntime::Execute asynchronously failed for network #{0} on inference #{1}",
                            m_NetworkIdentifier, inferenceID));
        }
        else
        {
            return std::make_tuple(inferenceID, duration);
        }
    }

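    // Schedules a single inference on the runtime with medium priority; the result is delivered
    // through the supplied callback rather than a return value.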
    void RunAsync(const std::vector<TContainer>& inputContainers,
                  std::vector<TContainer>& outputContainers,
                  armnn::experimental::IAsyncExecutionCallbackPtr cb)
    {
        for (unsigned int i = 0; i < outputContainers.size(); ++i)
        {
            const unsigned int expectedOutputDataSize = GetOutputSize(i);

            mapbox::util::apply_visitor([expectedOutputDataSize, i](auto&& value)
            {
                const unsigned int actualOutputDataSize = armnn::numeric_cast<unsigned int>(value.size());
                if (actualOutputDataSize < expectedOutputDataSize)
                {
                    unsigned int outputIndex = i;
                    throw armnn::Exception(
                            fmt::format("Not enough data for output #{0}: expected "
                            "{1} elements, got {2}", outputIndex, expectedOutputDataSize, actualOutputDataSize));
                }
            },
            outputContainers[i]);
        }

        std::shared_ptr<armnn::IProfiler> profiler = m_Runtime->GetProfiler(m_NetworkIdentifier);
        if (profiler)
        {
            profiler->EnableProfiling(m_EnableProfiling);
        }

        m_Runtime->Schedule(m_NetworkIdentifier,
                            MakeInputTensors(inputContainers),
                            MakeOutputTensors(outputContainers),
                            armnn::QosExecPriority::Medium,
                            cb);

        // if profiling is enabled print out the results
        if (profiler && profiler->IsProfilingEnabled())
        {
            profiler->Print(std::cout);
        }
    }

    const armnn::BindingPointInfo& GetInputBindingInfo(unsigned int inputIndex = 0u) const
    {
        CheckInputIndexIsValid(inputIndex);
        return m_InputBindings[inputIndex];
    }

    const std::vector<armnn::BindingPointInfo>& GetInputBindingInfos() const
    {
        return m_InputBindings;
    }

    const armnn::BindingPointInfo& GetOutputBindingInfo(unsigned int outputIndex = 0u) const
    {
        CheckOutputIndexIsValid(outputIndex);
        return m_OutputBindings[outputIndex];
    }

    const std::vector<armnn::BindingPointInfo>& GetOutputBindingInfos() const
    {
        return m_OutputBindings;
    }

    QuantizationParams GetQuantizationParams(unsigned int outputIndex = 0u) const
    {
        CheckOutputIndexIsValid(outputIndex);
        return std::make_pair(m_OutputBindings[outputIndex].second.GetQuantizationScale(),
                              m_OutputBindings[outputIndex].second.GetQuantizationOffset());
    }

    QuantizationParams GetInputQuantizationParams(unsigned int inputIndex = 0u) const
    {
        CheckInputIndexIsValid(inputIndex);
        return std::make_pair(m_InputBindings[inputIndex].second.GetQuantizationScale(),
                              m_InputBindings[inputIndex].second.GetQuantizationOffset());
    }

    std::vector<QuantizationParams> GetAllQuantizationParams() const
    {
        std::vector<QuantizationParams> quantizationParams;
        for (unsigned int i = 0u; i < m_OutputBindings.size(); i++)
        {
            quantizationParams.push_back(GetQuantizationParams(i));
        }
        return quantizationParams;
    }

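    // Creates the working memory handle required by the RunAsync overload that takes an
    // IWorkingMemHandle.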
    std::unique_ptr<armnn::experimental::IWorkingMemHandle> CreateWorkingMemHandle()
    {
        return m_Runtime->CreateWorkingMemHandle(m_NetworkIdentifier);
    }

private:
    armnn::NetworkId m_NetworkIdentifier;
    std::shared_ptr<armnn::IRuntime> m_Runtime;

    std::vector<armnn::BindingPointInfo> m_InputBindings;
    std::vector<armnn::BindingPointInfo> m_OutputBindings;
    bool m_EnableProfiling;
    std::string m_DynamicBackendsPath;

    template<typename TContainer>
    armnn::InputTensors MakeInputTensors(const std::vector<TContainer>& inputDataContainers)
    {
        return armnnUtils::MakeInputTensors(m_InputBindings, inputDataContainers);
    }

    template<typename TContainer>
    armnn::OutputTensors MakeOutputTensors(std::vector<TContainer>& outputDataContainers)
    {
        return armnnUtils::MakeOutputTensors(m_OutputBindings, outputDataContainers);
    }
};
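
// A minimal usage sketch of InferenceModel (illustrative only; the parser type, file path and
// binding names below are placeholders rather than values taken from this header):
//
//   using Model = InferenceModel<armnnTfLiteParser::ITfLiteParser, float>;
//   Model::Params params;
//   params.m_ModelPath      = "model.tflite";   // hypothetical path
//   params.m_InputBindings  = { "input" };      // hypothetical binding name
//   params.m_OutputBindings = { "output" };     // hypothetical binding name
//   params.m_ComputeDevices = { "CpuRef" };
//
//   Model model(params, /*enableProfiling=*/false, /*dynamicBackendsPath=*/"");
//   std::vector<Model::TContainer> inputs  = { std::vector<float>(model.GetInputSize()) };
//   std::vector<Model::TContainer> outputs = { std::vector<float>(model.GetOutputSize()) };
//   auto inferenceDuration = model.Run(inputs, outputs);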