//
// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#pragma once

#include <armnn/ArmNN.hpp>
#include <armnn/Threadpool.hpp>
#include <armnn/Logging.hpp>
#include <armnn/utility/Timer.hpp>
#include <armnn/BackendRegistry.hpp>
#include <armnn/utility/Assert.hpp>
#include <armnn/utility/NumericCast.hpp>
#include <common/include/ProfilingGuid.hpp>

#if defined(ARMNN_SERIALIZER)
#include "armnnDeserializer/IDeserializer.hpp"
#endif
#if defined(ARMNN_TF_LITE_PARSER)
#include <armnnTfLiteParser/ITfLiteParser.hpp>
#endif
#if defined(ARMNN_ONNX_PARSER)
#include <armnnOnnxParser/IOnnxParser.hpp>
#endif

#include <armnnUtils/Filesystem.hpp>
#include <HeapProfiling.hpp>
#include <TensorIOUtils.hpp>

#include "armnn/utility/StringUtils.hpp"
#include <cxxopts/cxxopts.hpp>
#include "CxxoptsUtils.hpp"
#include <fmt/format.h>
#include <mapbox/variant.hpp>

#include <algorithm>
#include <chrono>
#include <fstream>
#include <iomanip>
#include <iostream>
#include <iterator>
#include <map>
#include <string>
#include <tuple>
#include <type_traits>
#include <utility>
#include <vector>

namespace
{

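// Returns true only if every requested backend ID is registered with the backend registry.
// If the optional string reference is provided, any invalid IDs are appended to it as a
// comma-separated list so the caller can report them.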
inline bool CheckRequestedBackendsAreValid(const std::vector<armnn::BackendId>& backendIds,
                                           armnn::Optional<std::string&> invalidBackendIds = armnn::EmptyOptional())
{
    if (backendIds.empty())
    {
        return false;
    }

    armnn::BackendIdSet validBackendIds = armnn::BackendRegistryInstance().GetBackendIds();

    bool allValid = true;
    for (const auto& backendId : backendIds)
    {
        if (std::find(validBackendIds.begin(), validBackendIds.end(), backendId) == validBackendIds.end())
        {
            allValid = false;
            if (invalidBackendIds)
            {
                if (!invalidBackendIds.value().empty())
                {
                    invalidBackendIds.value() += ", ";
                }
                invalidBackendIds.value() += backendId;
            }
        }
    }
    return allValid;
}

} // anonymous namespace

namespace InferenceModelInternal
{
using BindingPointInfo = armnn::BindingPointInfo;

using QuantizationParams = std::pair<float, int32_t>;

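// Parameters describing the model to load and how it should be parsed, optimized and run.
// They are shared by the parser-specific CreateNetworkImpl specialisations below and by
// InferenceModel itself.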
struct Params
{
    std::string                     m_ModelPath;
    std::vector<std::string>        m_InputBindings;
    std::vector<armnn::TensorShape> m_InputShapes;
    std::vector<std::string>        m_OutputBindings;
    std::vector<armnn::BackendId>   m_ComputeDevices;
    std::string                     m_DynamicBackendsPath;
    size_t                          m_SubgraphId;
    bool                            m_IsModelBinary;
    bool                            m_VisualizePostOptimizationModel;
    bool                            m_EnableFp16TurboMode;
    bool                            m_EnableBf16TurboMode;
    bool                            m_PrintIntermediateLayers;
    bool                            m_ParseUnsupported;
    bool                            m_InferOutputShape;
    bool                            m_EnableFastMath;
    bool                            m_SaveCachedNetwork;
    bool                            m_OutputDetailsToStdOut;
    bool                            m_OutputDetailsOnlyToStdOut;
    std::string                     m_CachedNetworkFilePath;
    unsigned int                    m_NumberOfThreads;
    std::string                     m_MLGOTuningFilePath;
    bool                            m_AsyncEnabled;
    size_t                          m_ThreadPoolSize;

    Params()
        : m_ComputeDevices{}
        , m_SubgraphId(0)
        , m_IsModelBinary(true)
        , m_VisualizePostOptimizationModel(false)
        , m_EnableFp16TurboMode(false)
        , m_EnableBf16TurboMode(false)
        , m_PrintIntermediateLayers(false)
        , m_ParseUnsupported(false)
        , m_InferOutputShape(false)
        , m_EnableFastMath(false)
        , m_SaveCachedNetwork(false)
        , m_OutputDetailsToStdOut(false)
        , m_OutputDetailsOnlyToStdOut(false)
        , m_CachedNetworkFilePath("")
        , m_NumberOfThreads(0)
        , m_MLGOTuningFilePath("")
        , m_AsyncEnabled(false)
        , m_ThreadPoolSize(0)
    {}
};

} // namespace InferenceModelInternal

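// Generic implementation: parses a model file with the given IParser and fills in the
// input/output binding information requested in Params. Parsers whose interfaces differ
// from this default shape are handled by the specialisations below.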
template <typename IParser>
struct CreateNetworkImpl
{
public:
    using Params = InferenceModelInternal::Params;

    static armnn::INetworkPtr Create(const Params& params,
                                     std::vector<armnn::BindingPointInfo>& inputBindings,
                                     std::vector<armnn::BindingPointInfo>& outputBindings)
    {
        const std::string& modelPath = params.m_ModelPath;

        // Create a network from a file on disk
        auto parser(IParser::Create());

        std::map<std::string, armnn::TensorShape> inputShapes;
        if (!params.m_InputShapes.empty())
        {
            const size_t numInputShapes   = params.m_InputShapes.size();
            const size_t numInputBindings = params.m_InputBindings.size();
            if (numInputShapes < numInputBindings)
            {
                throw armnn::Exception(fmt::format(
                    "Not every input has its tensor shape specified: expected={0}, got={1}",
                    numInputBindings, numInputShapes));
            }

            for (size_t i = 0; i < numInputShapes; i++)
            {
                inputShapes[params.m_InputBindings[i]] = params.m_InputShapes[i];
            }
        }

        std::vector<std::string> requestedOutputs = params.m_OutputBindings;
        armnn::INetworkPtr network{nullptr, [](armnn::INetwork*){}};

        {
            ARMNN_SCOPED_HEAP_PROFILING("Parsing");
            // Handle text and binary input differently by calling the corresponding parser function
            network = (params.m_IsModelBinary ?
                parser->CreateNetworkFromBinaryFile(modelPath.c_str(), inputShapes, requestedOutputs) :
                parser->CreateNetworkFromTextFile(modelPath.c_str(), inputShapes, requestedOutputs));
        }

        for (const std::string& inputLayerName : params.m_InputBindings)
        {
            inputBindings.push_back(parser->GetNetworkInputBindingInfo(inputLayerName));
        }

        for (const std::string& outputLayerName : params.m_OutputBindings)
        {
            outputBindings.push_back(parser->GetNetworkOutputBindingInfo(outputLayerName));
        }

        return network;
    }
};

#if defined(ARMNN_SERIALIZER)
template <>
struct CreateNetworkImpl<armnnDeserializer::IDeserializer>
{
public:
    using IParser = armnnDeserializer::IDeserializer;
    using Params  = InferenceModelInternal::Params;

    static armnn::INetworkPtr Create(const Params& params,
                                     std::vector<armnn::BindingPointInfo>& inputBindings,
                                     std::vector<armnn::BindingPointInfo>& outputBindings)
    {
        auto parser(IParser::Create());
        ARMNN_ASSERT(parser);

        armnn::INetworkPtr network{nullptr, [](armnn::INetwork*){}};

        {
            ARMNN_SCOPED_HEAP_PROFILING("Parsing");

            std::error_code errorCode;
            fs::path pathToFile(params.m_ModelPath);
            if (!fs::exists(pathToFile, errorCode))
            {
                throw armnn::FileNotFoundException(fmt::format("Cannot find the file ({0}) errorCode: {1} {2}",
                                                                params.m_ModelPath,
                                                                errorCode.message(),
                                                                CHECK_LOCATION().AsString()));
            }
            std::ifstream file(params.m_ModelPath, std::ios::binary);

            network = parser->CreateNetworkFromBinary(file);
        }

        unsigned int subgraphId = armnn::numeric_cast<unsigned int>(params.m_SubgraphId);

        for (const std::string& inputLayerName : params.m_InputBindings)
        {
            armnnDeserializer::BindingPointInfo inputBinding =
                parser->GetNetworkInputBindingInfo(subgraphId, inputLayerName);
            inputBindings.push_back(std::make_pair(inputBinding.m_BindingId, inputBinding.m_TensorInfo));
        }

        for (const std::string& outputLayerName : params.m_OutputBindings)
        {
            armnnDeserializer::BindingPointInfo outputBinding =
                parser->GetNetworkOutputBindingInfo(subgraphId, outputLayerName);
            outputBindings.push_back(std::make_pair(outputBinding.m_BindingId, outputBinding.m_TensorInfo));
        }

        return network;
    }
};
#endif

#if defined(ARMNN_TF_LITE_PARSER)
template <>
struct CreateNetworkImpl<armnnTfLiteParser::ITfLiteParser>
{
public:
    using IParser = armnnTfLiteParser::ITfLiteParser;
    using Params  = InferenceModelInternal::Params;

    static armnn::INetworkPtr Create(const Params& params,
                                     std::vector<armnn::BindingPointInfo>& inputBindings,
                                     std::vector<armnn::BindingPointInfo>& outputBindings)
    {
        const std::string& modelPath = params.m_ModelPath;

        // Create a network from a file on disk
        IParser::TfLiteParserOptions options;
        options.m_StandInLayerForUnsupported = params.m_ParseUnsupported;
        options.m_InferAndValidate = params.m_InferOutputShape;
        auto parser(IParser::Create(options));

        armnn::INetworkPtr network{nullptr, [](armnn::INetwork*){}};

        {
            ARMNN_SCOPED_HEAP_PROFILING("Parsing");
            network = parser->CreateNetworkFromBinaryFile(modelPath.c_str());
        }

        for (const std::string& inputLayerName : params.m_InputBindings)
        {
            armnn::BindingPointInfo inputBinding =
                parser->GetNetworkInputBindingInfo(params.m_SubgraphId, inputLayerName);
            inputBindings.push_back(inputBinding);
        }

        for (const std::string& outputLayerName : params.m_OutputBindings)
        {
            armnn::BindingPointInfo outputBinding =
                parser->GetNetworkOutputBindingInfo(params.m_SubgraphId, outputLayerName);
            outputBindings.push_back(outputBinding);
        }

        return network;
    }
};
#endif

#if defined(ARMNN_ONNX_PARSER)
template <>
struct CreateNetworkImpl<armnnOnnxParser::IOnnxParser>
{
public:
    using IParser = armnnOnnxParser::IOnnxParser;
    using Params  = InferenceModelInternal::Params;
    using BindingPointInfo = InferenceModelInternal::BindingPointInfo;

    static armnn::INetworkPtr Create(const Params& params,
                                     std::vector<BindingPointInfo>& inputBindings,
                                     std::vector<BindingPointInfo>& outputBindings)
    {
        const std::string& modelPath = params.m_ModelPath;

        // Create a network from a file on disk
        auto parser(IParser::Create());

        armnn::INetworkPtr network{nullptr, [](armnn::INetwork*){}};

        std::map<std::string, armnn::TensorShape> inputShapes;
        if (!params.m_InputShapes.empty())
        {
            const size_t numInputShapes   = params.m_InputShapes.size();
            const size_t numInputBindings = params.m_InputBindings.size();
            if (numInputShapes < numInputBindings)
            {
                throw armnn::Exception(fmt::format(
                    "Not every input has its tensor shape specified: expected={0}, got={1}",
                    numInputBindings, numInputShapes));
            }

            for (size_t i = 0; i < numInputShapes; i++)
            {
                inputShapes[params.m_InputBindings[i]] = params.m_InputShapes[i];
            }

            {
                ARMNN_SCOPED_HEAP_PROFILING("Parsing");
                network = (params.m_IsModelBinary ?
                    parser->CreateNetworkFromBinaryFile(modelPath.c_str(), inputShapes) :
                    parser->CreateNetworkFromTextFile(modelPath.c_str(), inputShapes));
            }
        }
        else
        {
            ARMNN_SCOPED_HEAP_PROFILING("Parsing");
            network = (params.m_IsModelBinary ?
                parser->CreateNetworkFromBinaryFile(modelPath.c_str()) :
                parser->CreateNetworkFromTextFile(modelPath.c_str()));
        }

        for (const std::string& inputLayerName : params.m_InputBindings)
        {
            BindingPointInfo inputBinding = parser->GetNetworkInputBindingInfo(inputLayerName);
            inputBindings.push_back(inputBinding);
        }

        for (const std::string& outputLayerName : params.m_OutputBindings)
        {
            BindingPointInfo outputBinding = parser->GetNetworkOutputBindingInfo(outputLayerName);
            outputBindings.push_back(outputBinding);
        }

        return network;
    }
};
#endif

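// Wraps the full Arm NN workflow for a single model: parse (via CreateNetworkImpl),
// optimize, load into an IRuntime, and run inferences synchronously or asynchronously.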
template <typename IParser, typename TDataType>
class InferenceModel
{
public:
    using DataType = TDataType;
    using Params = InferenceModelInternal::Params;
    using QuantizationParams = InferenceModelInternal::QuantizationParams;
    using TContainer
        = mapbox::util::variant<std::vector<float>, std::vector<int>, std::vector<unsigned char>, std::vector<int8_t>>;

    struct CommandLineOptions
    {
        std::string m_ModelDir;
        std::vector<std::string> m_ComputeDevices;
        std::string m_DynamicBackendsPath;
        bool m_VisualizePostOptimizationModel;
        bool m_EnableFp16TurboMode;
        bool m_EnableBf16TurboMode;
        std::string m_Labels;

        std::vector<armnn::BackendId> GetComputeDevicesAsBackendIds()
        {
            std::vector<armnn::BackendId> backendIds;
            std::copy(m_ComputeDevices.begin(), m_ComputeDevices.end(), std::back_inserter(backendIds));
            return backendIds;
        }
    };

    static void AddCommandLineOptions(cxxopts::Options& options,
                                      CommandLineOptions& cLineOptions, std::vector<std::string>& required)
    {
        const std::vector<std::string> defaultComputes = { "CpuAcc", "CpuRef" };

        const std::string backendsMessage = "Which device to run layers on by default. Possible choices: "
                                            + armnn::BackendRegistryInstance().GetBackendIdsAsString();

        options
            .allow_unrecognised_options()
            .add_options()
            ("m,model-dir", "Path to directory containing model files (.prototxt/.tflite)",
             cxxopts::value<std::string>(cLineOptions.m_ModelDir))
            ("c,compute", backendsMessage.c_str(),
             cxxopts::value<std::vector<std::string>>(cLineOptions.m_ComputeDevices)->default_value("CpuRef"))
            ("b,dynamic-backends-path",
             "Path where to load any available dynamic backend from. "
             "If left empty (the default), dynamic backends will not be used.",
             cxxopts::value(cLineOptions.m_DynamicBackendsPath))
            ("l,labels",
             "Text file containing one image filename - correct label pair per line, "
             "used to test the accuracy of the network.", cxxopts::value<std::string>(cLineOptions.m_Labels))
            ("v,visualize-optimized-model",
             "Produce a dot file useful for visualizing the graph post optimization. "
             "The file will have the same name as the model with the .dot extension.",
             cxxopts::value<bool>(cLineOptions.m_VisualizePostOptimizationModel)->default_value("false"))
            ("fp16-turbo-mode",
             "If this option is enabled, FP32 layers, weights and biases will be converted "
             "to FP16 where the backend supports it.",
             cxxopts::value<bool>(cLineOptions.m_EnableFp16TurboMode)->default_value("false"))
            ("bf16-turbo-mode",
             "If this option is enabled, FP32 layers, weights and biases will be converted "
             "to BF16 where the backend supports it.",
             cxxopts::value<bool>(cLineOptions.m_EnableBf16TurboMode)->default_value("false"));

        required.emplace_back("model-dir");
    }

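    // Parses, optimizes and loads the network described by params. Throws armnn::Exception
    // if a requested backend is invalid, if optimization fails, or if the runtime cannot
    // load the optimized network.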
    InferenceModel(const Params& params,
                   bool enableProfiling,
                   const std::string& dynamicBackendsPath,
                   const std::shared_ptr<armnn::IRuntime>& runtime = nullptr)
        : m_EnableProfiling(enableProfiling)
        , m_ProfilingDetailsMethod(armnn::ProfilingDetailsMethod::Undefined)
        , m_DynamicBackendsPath(dynamicBackendsPath)
    {
        if (runtime)
        {
            m_Runtime = runtime;
        }
        else
        {
            armnn::IRuntime::CreationOptions options;
            options.m_EnableGpuProfiling = m_EnableProfiling;
            options.m_DynamicBackendsPath = m_DynamicBackendsPath;
            m_Runtime = armnn::IRuntime::Create(options);
        }

        // Configure the profiler if profiling details were requested
        if (params.m_OutputDetailsOnlyToStdOut)
        {
            m_ProfilingDetailsMethod = armnn::ProfilingDetailsMethod::DetailsOnly;
        }
        else if (params.m_OutputDetailsToStdOut)
        {
            m_ProfilingDetailsMethod = armnn::ProfilingDetailsMethod::DetailsWithEvents;
        }

        std::string invalidBackends;
        if (!CheckRequestedBackendsAreValid(params.m_ComputeDevices, armnn::Optional<std::string&>(invalidBackends)))
        {
            throw armnn::Exception("Some backend IDs are invalid: " + invalidBackends);
        }

        armnn::IOptimizedNetworkPtr optNet{nullptr, [](armnn::IOptimizedNetwork*){}};
        {
            const auto parsing_start_time = armnn::GetTimeNow();
            armnn::INetworkPtr network = CreateNetworkImpl<IParser>::Create(params, m_InputBindings, m_OutputBindings);

            ARMNN_LOG(info) << "Network parsing time: " << std::setprecision(2)
                            << std::fixed << armnn::GetTimeDuration(parsing_start_time).count() << " ms\n";

            ARMNN_SCOPED_HEAP_PROFILING("Optimizing");

            armnn::OptimizerOptions options;
            options.m_ReduceFp32ToFp16 = params.m_EnableFp16TurboMode;
            options.m_ReduceFp32ToBf16 = params.m_EnableBf16TurboMode;
            options.m_Debug = params.m_PrintIntermediateLayers;
            options.m_shapeInferenceMethod = params.m_InferOutputShape ?
                armnn::ShapeInferenceMethod::InferAndValidate : armnn::ShapeInferenceMethod::ValidateOnly;
            options.m_ProfilingEnabled = m_EnableProfiling;

            armnn::BackendOptions gpuAcc("GpuAcc",
            {
                { "FastMathEnabled", params.m_EnableFastMath },
                { "SaveCachedNetwork", params.m_SaveCachedNetwork },
                { "CachedNetworkFilePath", params.m_CachedNetworkFilePath },
                { "MLGOTuningFilePath", params.m_MLGOTuningFilePath }
            });

            armnn::BackendOptions cpuAcc("CpuAcc",
            {
                { "FastMathEnabled", params.m_EnableFastMath },
                { "NumberOfThreads", params.m_NumberOfThreads }
            });
            options.m_ModelOptions.push_back(gpuAcc);
            options.m_ModelOptions.push_back(cpuAcc);

            const auto optimization_start_time = armnn::GetTimeNow();
            optNet = armnn::Optimize(*network, params.m_ComputeDevices, m_Runtime->GetDeviceSpec(), options);

            ARMNN_LOG(info) << "Optimization time: " << std::setprecision(2)
                            << std::fixed << armnn::GetTimeDuration(optimization_start_time).count() << " ms\n";

            if (!optNet)
            {
                throw armnn::Exception("Optimize returned nullptr");
            }
        }

        if (params.m_VisualizePostOptimizationModel)
        {
            fs::path filename = params.m_ModelPath;
            filename.replace_extension("dot");
            std::fstream file(filename.c_str(), std::ios_base::out);
            optNet->SerializeToDot(file);
        }

        armnn::Status ret;
        {
            ARMNN_SCOPED_HEAP_PROFILING("LoadNetwork");

            const auto loading_start_time = armnn::GetTimeNow();
            armnn::INetworkProperties networkProperties(params.m_AsyncEnabled,
                                                        armnn::MemorySource::Undefined,
                                                        armnn::MemorySource::Undefined,
                                                        enableProfiling,
                                                        m_ProfilingDetailsMethod);
            std::string errorMessage;
            ret = m_Runtime->LoadNetwork(m_NetworkIdentifier, std::move(optNet), errorMessage, networkProperties);

            ARMNN_LOG(info) << "Network loading time: " << std::setprecision(2)
                            << std::fixed << armnn::GetTimeDuration(loading_start_time).count() << " ms\n";

            if (params.m_AsyncEnabled && params.m_ThreadPoolSize > 0)
            {
                std::vector<std::shared_ptr<armnn::IWorkingMemHandle>> memHandles;
                for (size_t i = 0; i < params.m_ThreadPoolSize; ++i)
                {
                    memHandles.emplace_back(m_Runtime->CreateWorkingMemHandle(m_NetworkIdentifier));
                }

                m_Threadpool = std::make_unique<armnn::Threadpool>(params.m_ThreadPoolSize,
                                                                   m_Runtime.get(),
                                                                   memHandles);
            }
        }

        if (ret == armnn::Status::Failure)
        {
            throw armnn::Exception("IRuntime::LoadNetwork failed");
        }
    }

    void CheckInputIndexIsValid(unsigned int inputIndex) const
    {
        if (m_InputBindings.size() < inputIndex + 1)
        {
            throw armnn::Exception(fmt::format("Input index out of range: {}", inputIndex));
        }
    }

    void CheckOutputIndexIsValid(unsigned int outputIndex) const
    {
        if (m_OutputBindings.size() < outputIndex + 1)
        {
            throw armnn::Exception(fmt::format("Output index out of range: {}", outputIndex));
        }
    }

    unsigned int GetInputSize(unsigned int inputIndex = 0u) const
    {
        CheckInputIndexIsValid(inputIndex);
        return m_InputBindings[inputIndex].second.GetNumElements();
    }

    unsigned int GetOutputSize(unsigned int outputIndex = 0u) const
    {
        CheckOutputIndexIsValid(outputIndex);
        return m_OutputBindings[outputIndex].second.GetNumElements();
    }

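    // Runs a single synchronous inference and returns the wall-clock duration of
    // EnqueueWorkload. Each output container must already be sized to hold its result.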
    std::chrono::duration<double, std::milli> Run(
            const std::vector<TContainer>& inputContainers,
            std::vector<TContainer>& outputContainers)
    {
        for (unsigned int i = 0; i < outputContainers.size(); ++i)
        {
            const unsigned int expectedOutputDataSize = GetOutputSize(i);

            mapbox::util::apply_visitor([expectedOutputDataSize, i](auto&& value)
            {
                const unsigned int actualOutputDataSize = armnn::numeric_cast<unsigned int>(value.size());
                if (actualOutputDataSize < expectedOutputDataSize)
                {
                    unsigned int outputIndex = i;
                    throw armnn::Exception(
                        fmt::format("Not enough data for output #{0}: expected "
                                    "{1} elements, got {2}", outputIndex, expectedOutputDataSize, actualOutputDataSize));
                }
            },
            outputContainers[i]);
        }

        std::shared_ptr<armnn::IProfiler> profiler = m_Runtime->GetProfiler(m_NetworkIdentifier);

        // Start timer to record inference time in EnqueueWorkload (in milliseconds)
        const auto start_time = armnn::GetTimeNow();

        armnn::Status ret = m_Runtime->EnqueueWorkload(m_NetworkIdentifier,
                                                       MakeInputTensors(inputContainers),
                                                       MakeOutputTensors(outputContainers));
        const auto duration = armnn::GetTimeDuration(start_time);

        // if profiling is enabled print out the results
        if (profiler && profiler->IsProfilingEnabled())
        {
            profiler->Print(std::cout);
        }

        if (ret == armnn::Status::Failure)
        {
            throw armnn::Exception("IRuntime::EnqueueWorkload failed");
        }
        else
        {
            return duration;
        }
    }

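    // Runs one inference asynchronously using a caller-supplied working memory handle and
    // returns the inference ID together with the execution duration.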
    std::tuple<unsigned int, std::chrono::duration<double, std::milli>> RunAsync(
        armnn::experimental::IWorkingMemHandle& workingMemHandleRef,
        const std::vector<TContainer>& inputContainers,
        std::vector<TContainer>& outputContainers,
        unsigned int inferenceID)
    {
        for (unsigned int i = 0; i < outputContainers.size(); ++i)
        {
            const unsigned int expectedOutputDataSize = GetOutputSize(i);

            mapbox::util::apply_visitor([expectedOutputDataSize, i](auto&& value)
            {
                const unsigned int actualOutputDataSize = armnn::numeric_cast<unsigned int>(value.size());
                if (actualOutputDataSize < expectedOutputDataSize)
                {
                    unsigned int outputIndex = i;
                    throw armnn::Exception(
                        fmt::format("Not enough data for output #{0}: expected "
                                    "{1} elements, got {2}", outputIndex, expectedOutputDataSize, actualOutputDataSize));
                }
            },
            outputContainers[i]);
        }

        std::shared_ptr<armnn::IProfiler> profiler = m_Runtime->GetProfiler(m_NetworkIdentifier);

        // Start timer to record inference time in EnqueueWorkload (in milliseconds)
        const auto start_time = armnn::GetTimeNow();

        armnn::Status ret = m_Runtime->Execute(workingMemHandleRef,
                                               MakeInputTensors(inputContainers),
                                               MakeOutputTensors(outputContainers));

        const auto duration = armnn::GetTimeDuration(start_time);

        // if profiling is enabled print out the results
        if (profiler && profiler->IsProfilingEnabled())
        {
            profiler->Print(std::cout);
        }

        if (ret == armnn::Status::Failure)
        {
            throw armnn::Exception(
                fmt::format("IRuntime::Execute asynchronously failed for network #{0} on inference #{1}",
                            m_NetworkIdentifier, inferenceID));
        }
        else
        {
            return std::make_tuple(inferenceID, duration);
        }
    }

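    // Schedules an inference on the internal thread pool (created when async execution and a
    // non-zero thread pool size were requested); results are delivered through the callback.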
    void RunAsync(const std::vector<TContainer>& inputContainers,
                  std::vector<TContainer>& outputContainers,
                  std::shared_ptr<armnn::IAsyncExecutionCallback> cb)
    {
        for (unsigned int i = 0; i < outputContainers.size(); ++i)
        {
            const unsigned int expectedOutputDataSize = GetOutputSize(i);

            mapbox::util::apply_visitor([expectedOutputDataSize, i](auto&& value)
            {
                const unsigned int actualOutputDataSize = armnn::numeric_cast<unsigned int>(value.size());
                if (actualOutputDataSize < expectedOutputDataSize)
                {
                    unsigned int outputIndex = i;
                    throw armnn::Exception(
                        fmt::format("Not enough data for output #{0}: expected "
                                    "{1} elements, got {2}", outputIndex, expectedOutputDataSize, actualOutputDataSize));
                }
            },
            outputContainers[i]);
        }

        std::shared_ptr<armnn::IProfiler> profiler = m_Runtime->GetProfiler(m_NetworkIdentifier);

        m_Threadpool->Schedule(m_NetworkIdentifier,
                               MakeInputTensors(inputContainers),
                               MakeOutputTensors(outputContainers),
                               armnn::QosExecPriority::Medium,
                               cb);

        // if profiling is enabled print out the results
        if (profiler && profiler->IsProfilingEnabled())
        {
            profiler->Print(std::cout);
        }
    }

    const armnn::BindingPointInfo& GetInputBindingInfo(unsigned int inputIndex = 0u) const
    {
        CheckInputIndexIsValid(inputIndex);
        return m_InputBindings[inputIndex];
    }

    const std::vector<armnn::BindingPointInfo>& GetInputBindingInfos() const
    {
        return m_InputBindings;
    }

    const armnn::BindingPointInfo& GetOutputBindingInfo(unsigned int outputIndex = 0u) const
    {
        CheckOutputIndexIsValid(outputIndex);
        return m_OutputBindings[outputIndex];
    }

    const std::vector<armnn::BindingPointInfo>& GetOutputBindingInfos() const
    {
        return m_OutputBindings;
    }

    QuantizationParams GetQuantizationParams(unsigned int outputIndex = 0u) const
    {
        CheckOutputIndexIsValid(outputIndex);
        return std::make_pair(m_OutputBindings[outputIndex].second.GetQuantizationScale(),
                              m_OutputBindings[outputIndex].second.GetQuantizationOffset());
    }

    QuantizationParams GetInputQuantizationParams(unsigned int inputIndex = 0u) const
    {
        CheckInputIndexIsValid(inputIndex);
        return std::make_pair(m_InputBindings[inputIndex].second.GetQuantizationScale(),
                              m_InputBindings[inputIndex].second.GetQuantizationOffset());
    }

    std::vector<QuantizationParams> GetAllQuantizationParams() const
    {
        std::vector<QuantizationParams> quantizationParams;
        for (unsigned int i = 0u; i < m_OutputBindings.size(); i++)
        {
            quantizationParams.push_back(GetQuantizationParams(i));
        }
        return quantizationParams;
    }

    std::unique_ptr<armnn::experimental::IWorkingMemHandle> CreateWorkingMemHandle()
    {
        return m_Runtime->CreateWorkingMemHandle(m_NetworkIdentifier);
    }

private:
    armnn::NetworkId m_NetworkIdentifier;
    std::shared_ptr<armnn::IRuntime> m_Runtime;
    std::unique_ptr<armnn::Threadpool> m_Threadpool;

    std::vector<armnn::BindingPointInfo> m_InputBindings;
    std::vector<armnn::BindingPointInfo> m_OutputBindings;
    bool m_EnableProfiling;
    armnn::ProfilingDetailsMethod m_ProfilingDetailsMethod;
    std::string m_DynamicBackendsPath;

    template<typename TContainer>
    armnn::InputTensors MakeInputTensors(const std::vector<TContainer>& inputDataContainers)
    {
        return armnnUtils::MakeInputTensors(m_InputBindings, inputDataContainers);
    }

    template<typename TContainer>
    armnn::OutputTensors MakeOutputTensors(std::vector<TContainer>& outputDataContainers)
    {
        return armnnUtils::MakeOutputTensors(m_OutputBindings, outputDataContainers);
    }
};
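
// Illustrative usage sketch (not part of the original header): shows how a caller such as a
// test harness might drive InferenceModel with the TfLite parser. The model path and the
// "input"/"output" binding names below are assumptions for the example only, and it requires
// ARMNN_TF_LITE_PARSER to be defined.
//
//     using Model = InferenceModel<armnnTfLiteParser::ITfLiteParser, float>;
//
//     Model::Params params;
//     params.m_ModelPath      = "model.tflite";        // hypothetical file
//     params.m_InputBindings  = { "input" };           // hypothetical binding name
//     params.m_OutputBindings = { "output" };          // hypothetical binding name
//     params.m_ComputeDevices = { "CpuAcc", "CpuRef" };
//
//     Model model(params, /*enableProfiling=*/false, /*dynamicBackendsPath=*/"");
//
//     std::vector<Model::TContainer> inputs  = { std::vector<float>(model.GetInputSize()) };
//     std::vector<Model::TContainer> outputs = { std::vector<float>(model.GetOutputSize()) };
//     auto inferenceDuration = model.Run(inputs, outputs);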