blob: c053a4429adc3d054bacf7b5deedc19bb934c91f [file] [log] [blame]
telsoa014fcda012018-03-09 14:13:49 +00001//
Ryan OSheab5540542022-07-06 09:52:52 +01002// Copyright © 2022-2023 Arm Ltd and Contributors. All rights reserved.
David Beckecb56cd2018-09-05 12:52:57 +01003// SPDX-License-Identifier: MIT
telsoa014fcda012018-03-09 14:13:49 +00004//
Matteo Martincighc601aa62019-10-29 15:03:22 +00005
telsoa014fcda012018-03-09 14:13:49 +00006#pragma once
Matteo Martincighc601aa62019-10-29 15:03:22 +00007
David Monahan6bb47a72021-10-22 12:57:28 +01008
David Beckf0b48452018-10-19 15:20:56 +01009#include <armnn/ArmNN.hpp>
Ryan OSheab5540542022-07-06 09:52:52 +010010
11#if !defined(ARMNN_DISABLE_THREADS)
Finn Williamsf364d532021-06-09 17:07:33 +010012#include <armnn/Threadpool.hpp>
Ryan OSheab5540542022-07-06 09:52:52 +010013#include <common/include/IgnoreUnused.hpp>
14#endif
15
alered01a7227ac2020-05-07 14:58:29 +010016#include <armnn/Logging.hpp>
17#include <armnn/utility/Timer.hpp>
Matteo Martincighc601aa62019-10-29 15:03:22 +000018#include <armnn/BackendRegistry.hpp>
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +010019#include <armnn/utility/Assert.hpp>
Matthew Sloyan80c6b142020-09-08 12:00:32 +010020#include <armnn/utility/NumericCast.hpp>
Francis Murtagh40d27412021-10-28 11:11:35 +010021
22#include <armnnUtils/TContainer.hpp>
Teresa Charlin83b42912022-07-07 14:24:59 +010023#include "NetworkExecutionUtils/NetworkExecutionUtils.hpp"
Francis Murtagh40d27412021-10-28 11:11:35 +010024
Nikhil Raj7dcc6972021-04-30 15:44:24 +010025#include <common/include/ProfilingGuid.hpp>
telsoa01c577f2c2018-08-31 09:22:23 +010026
Aron Virginas-Tar64e4ccb2019-02-12 11:27:53 +000027#if defined(ARMNN_SERIALIZER)
Derek Lamberti0028d1b2019-02-20 13:57:42 +000028#include "armnnDeserializer/IDeserializer.hpp"
Aron Virginas-Tar64e4ccb2019-02-12 11:27:53 +000029#endif
telsoa01c577f2c2018-08-31 09:22:23 +010030#if defined(ARMNN_TF_LITE_PARSER)
David Beckf0b48452018-10-19 15:20:56 +010031#include <armnnTfLiteParser/ITfLiteParser.hpp>
telsoa01c577f2c2018-08-31 09:22:23 +010032#endif
telsoa01c577f2c2018-08-31 09:22:23 +010033#if defined(ARMNN_ONNX_PARSER)
David Beckf0b48452018-10-19 15:20:56 +010034#include <armnnOnnxParser/IOnnxParser.hpp>
telsoa01c577f2c2018-08-31 09:22:23 +010035#endif
telsoa014fcda012018-03-09 14:13:49 +000036
Rob Hughes9542f902021-07-14 09:48:54 +010037#include <armnnUtils/Filesystem.hpp>
Aron Virginas-Tar64e4ccb2019-02-12 11:27:53 +000038#include <HeapProfiling.hpp>
Jim Flynn2fd61002019-05-03 12:54:26 +010039#include <TensorIOUtils.hpp>
Aron Virginas-Tar64e4ccb2019-02-12 11:27:53 +000040
David Monahana8837bf2020-04-16 10:01:56 +010041#include "armnn/utility/StringUtils.hpp"
James Wardc89829f2020-10-12 14:17:36 +010042#include <cxxopts/cxxopts.hpp>
43#include "CxxoptsUtils.hpp"
James Ward08f40162020-09-07 16:45:07 +010044#include <fmt/format.h>
James Ward6d9f5c52020-09-28 11:56:35 +010045#include <mapbox/variant.hpp>
telsoa014fcda012018-03-09 14:13:49 +000046
Aron Virginas-Tar339bcae2019-01-31 16:44:26 +000047#include <algorithm>
48#include <iterator>
Aron Virginas-Tar5cc8e562018-10-23 15:14:46 +010049#include <fstream>
telsoa014fcda012018-03-09 14:13:49 +000050#include <map>
51#include <string>
Aron Virginas-Tar7cf0eaa2019-01-24 17:05:36 +000052#include <vector>
telsoa01c577f2c2018-08-31 09:22:23 +010053#include <type_traits>
54
55namespace InferenceModelInternal
56{
Jim Flynnb4d7eae2019-05-01 14:44:27 +010057using BindingPointInfo = armnn::BindingPointInfo;
telsoa01c577f2c2018-08-31 09:22:23 +010058
59using QuantizationParams = std::pair<float,int32_t>;
60
61struct Params
62{
Aron Virginas-Tar7cf0eaa2019-01-24 17:05:36 +000063 std::string m_ModelPath;
64 std::vector<std::string> m_InputBindings;
65 std::vector<armnn::TensorShape> m_InputShapes;
66 std::vector<std::string> m_OutputBindings;
Aron Virginas-Tar339bcae2019-01-31 16:44:26 +000067 std::vector<armnn::BackendId> m_ComputeDevices;
Matteo Martincigh00dda4a2019-08-14 11:42:30 +010068 std::string m_DynamicBackendsPath;
Aron Virginas-Tar7cf0eaa2019-01-24 17:05:36 +000069 size_t m_SubgraphId;
Mike Kelly80512b02022-05-16 23:10:42 +010070 bool m_AllowExpandedDims;
Aron Virginas-Tar7cf0eaa2019-01-24 17:05:36 +000071 bool m_IsModelBinary;
72 bool m_VisualizePostOptimizationModel;
73 bool m_EnableFp16TurboMode;
Narumol Prangnawaratd8cc8112020-03-24 13:54:05 +000074 bool m_EnableBf16TurboMode;
Matthew Jackson54658b92019-08-27 15:35:59 +010075 bool m_PrintIntermediateLayers;
Keith Davis15f9c682022-10-14 15:50:33 +010076 bool m_PrintIntermediateLayersToFile;
Derek Lamberti132563c2019-12-02 16:06:40 +000077 bool m_ParseUnsupported;
Sadik Armagana9c2ce12020-07-14 10:02:22 +010078 bool m_InferOutputShape;
Sadik Armagana25886e2020-09-15 17:17:08 +010079 bool m_EnableFastMath;
Matthew Sloyan42432112021-01-08 10:30:51 +000080 bool m_SaveCachedNetwork;
Keith Davisf4874862021-08-09 16:49:18 +010081 bool m_OutputDetailsToStdOut;
Keith Davis4914d0c2021-08-18 17:14:05 +010082 bool m_OutputDetailsOnlyToStdOut;
Matthew Sloyan42432112021-01-08 10:30:51 +000083 std::string m_CachedNetworkFilePath;
Matthew Sloyan0a7dc6b2021-02-10 16:50:53 +000084 unsigned int m_NumberOfThreads;
Finn Williams40646322021-02-11 16:16:42 +000085 std::string m_MLGOTuningFilePath;
Sadik Armagana04a9d72021-04-27 10:02:10 +010086 bool m_AsyncEnabled;
Kevin Mayb4b3ac92021-05-21 16:42:21 +010087 size_t m_ThreadPoolSize;
Jim Flynn15425812022-02-15 16:53:13 +000088 bool m_ImportInputsIfAligned;
Finn Williams40646322021-02-11 16:16:42 +000089
telsoa01c577f2c2018-08-31 09:22:23 +010090
91 Params()
Matteo Martincigh00dda4a2019-08-14 11:42:30 +010092 : m_ComputeDevices{}
telsoa01c577f2c2018-08-31 09:22:23 +010093 , m_SubgraphId(0)
Mike Kelly80512b02022-05-16 23:10:42 +010094 , m_AllowExpandedDims(false)
telsoa01c577f2c2018-08-31 09:22:23 +010095 , m_IsModelBinary(true)
96 , m_VisualizePostOptimizationModel(false)
97 , m_EnableFp16TurboMode(false)
Narumol Prangnawaratd8cc8112020-03-24 13:54:05 +000098 , m_EnableBf16TurboMode(false)
Matthew Jackson54658b92019-08-27 15:35:59 +010099 , m_PrintIntermediateLayers(false)
Keith Davis15f9c682022-10-14 15:50:33 +0100100 , m_PrintIntermediateLayersToFile(false)
Derek Lamberti132563c2019-12-02 16:06:40 +0000101 , m_ParseUnsupported(false)
Sadik Armagana9c2ce12020-07-14 10:02:22 +0100102 , m_InferOutputShape(false)
Sadik Armagana25886e2020-09-15 17:17:08 +0100103 , m_EnableFastMath(false)
Matthew Sloyan42432112021-01-08 10:30:51 +0000104 , m_SaveCachedNetwork(false)
Keith Davisf4874862021-08-09 16:49:18 +0100105 , m_OutputDetailsToStdOut(false)
Keith Davis4914d0c2021-08-18 17:14:05 +0100106 , m_OutputDetailsOnlyToStdOut(false)
Matthew Sloyan42432112021-01-08 10:30:51 +0000107 , m_CachedNetworkFilePath("")
Matthew Sloyan0a7dc6b2021-02-10 16:50:53 +0000108 , m_NumberOfThreads(0)
Finn Williams40646322021-02-11 16:16:42 +0000109 , m_MLGOTuningFilePath("")
Sadik Armagana04a9d72021-04-27 10:02:10 +0100110 , m_AsyncEnabled(false)
Kevin May94dd4db2021-05-26 16:01:08 +0100111 , m_ThreadPoolSize(0)
Jim Flynn15425812022-02-15 16:53:13 +0000112 , m_ImportInputsIfAligned(false)
telsoa01c577f2c2018-08-31 09:22:23 +0100113 {}
114};
115
116} // namespace InferenceModelInternal
117
118template <typename IParser>
119struct CreateNetworkImpl
120{
121public:
122 using Params = InferenceModelInternal::Params;
telsoa01c577f2c2018-08-31 09:22:23 +0100123
124 static armnn::INetworkPtr Create(const Params& params,
Jim Flynnb4d7eae2019-05-01 14:44:27 +0100125 std::vector<armnn::BindingPointInfo>& inputBindings,
126 std::vector<armnn::BindingPointInfo>& outputBindings)
telsoa01c577f2c2018-08-31 09:22:23 +0100127 {
Aron Virginas-Tar7cf0eaa2019-01-24 17:05:36 +0000128 const std::string& modelPath = params.m_ModelPath;
telsoa01c577f2c2018-08-31 09:22:23 +0100129
Aron Virginas-Tar7cf0eaa2019-01-24 17:05:36 +0000130 // Create a network from a file on disk
131 auto parser(IParser::Create());
telsoa01c577f2c2018-08-31 09:22:23 +0100132
Aron Virginas-Tar7cf0eaa2019-01-24 17:05:36 +0000133 std::map<std::string, armnn::TensorShape> inputShapes;
134 if (!params.m_InputShapes.empty())
135 {
136 const size_t numInputShapes = params.m_InputShapes.size();
137 const size_t numInputBindings = params.m_InputBindings.size();
138 if (numInputShapes < numInputBindings)
139 {
James Ward08f40162020-09-07 16:45:07 +0100140 throw armnn::Exception(fmt::format(
141 "Not every input has its tensor shape specified: expected={0}, got={1}",
142 numInputBindings, numInputShapes));
Aron Virginas-Tar7cf0eaa2019-01-24 17:05:36 +0000143 }
telsoa01c577f2c2018-08-31 09:22:23 +0100144
Aron Virginas-Tar7cf0eaa2019-01-24 17:05:36 +0000145 for (size_t i = 0; i < numInputShapes; i++)
146 {
147 inputShapes[params.m_InputBindings[i]] = params.m_InputShapes[i];
148 }
149 }
telsoa01c577f2c2018-08-31 09:22:23 +0100150
Aron Virginas-Tar7cf0eaa2019-01-24 17:05:36 +0000151 std::vector<std::string> requestedOutputs = params.m_OutputBindings;
152 armnn::INetworkPtr network{nullptr, [](armnn::INetwork *){}};
153
154 {
155 ARMNN_SCOPED_HEAP_PROFILING("Parsing");
156 // Handle text and binary input differently by calling the corresponding parser function
157 network = (params.m_IsModelBinary ?
158 parser->CreateNetworkFromBinaryFile(modelPath.c_str(), inputShapes, requestedOutputs) :
159 parser->CreateNetworkFromTextFile(modelPath.c_str(), inputShapes, requestedOutputs));
160 }
161
162 for (const std::string& inputLayerName : params.m_InputBindings)
163 {
164 inputBindings.push_back(parser->GetNetworkInputBindingInfo(inputLayerName));
165 }
166
167 for (const std::string& outputLayerName : params.m_OutputBindings)
168 {
169 outputBindings.push_back(parser->GetNetworkOutputBindingInfo(outputLayerName));
170 }
171
172 return network;
telsoa01c577f2c2018-08-31 09:22:23 +0100173 }
174};
175
#if defined(ARMNN_SERIALIZER)
/// Specialization for pre-serialized ArmNN networks (.armnn files).
template <>
struct CreateNetworkImpl<armnnDeserializer::IDeserializer>
{
public:
    using IParser = armnnDeserializer::IDeserializer;
    using Params = InferenceModelInternal::Params;

    /// Deserializes an ArmNN network from the binary file named in params and
    /// appends the binding info of the requested inputs/outputs.
    static armnn::INetworkPtr Create(const Params& params,
                                     std::vector<armnn::BindingPointInfo>& inputBindings,
                                     std::vector<armnn::BindingPointInfo>& outputBindings)
    {
        auto deserializer(IParser::Create());
        ARMNN_ASSERT(deserializer);

        armnn::INetworkPtr network{nullptr, [](armnn::INetwork*){}};

        {
            ARMNN_SCOPED_HEAP_PROFILING("Parsing");

            // Check the file exists up front so we can report a precise error
            // instead of a generic parse failure.
            std::error_code errorCode;
            fs::path pathToFile(params.m_ModelPath);
            if (!fs::exists(pathToFile, errorCode))
            {
                throw armnn::FileNotFoundException(fmt::format("Cannot find the file ({0}) errorCode: {1} {2}",
                                                               params.m_ModelPath,
                                                               errorCode.message(),
                                                               CHECK_LOCATION().AsString()));
            }
            std::ifstream file(params.m_ModelPath, std::ios::binary);

            network = deserializer->CreateNetworkFromBinary(file);
        }

        const unsigned int subgraphId = armnn::numeric_cast<unsigned int>(params.m_SubgraphId);

        for (const std::string& layerName : params.m_InputBindings)
        {
            armnnDeserializer::BindingPointInfo info =
                deserializer->GetNetworkInputBindingInfo(subgraphId, layerName);
            inputBindings.emplace_back(info.m_BindingId, info.m_TensorInfo);
        }

        for (const std::string& layerName : params.m_OutputBindings)
        {
            armnnDeserializer::BindingPointInfo info =
                deserializer->GetNetworkOutputBindingInfo(subgraphId, layerName);
            outputBindings.emplace_back(info.m_BindingId, info.m_TensorInfo);
        }

        return network;
    }
};
#endif
230
#if defined(ARMNN_TF_LITE_PARSER)
/// Specialization for TensorFlow Lite flatbuffer models.
template <>
struct CreateNetworkImpl<armnnTfLiteParser::ITfLiteParser>
{
public:
    using IParser = armnnTfLiteParser::ITfLiteParser;
    using Params = InferenceModelInternal::Params;

    /// Parses a TfLite model from the file named in params and appends the
    /// binding info of the requested inputs/outputs for the chosen subgraph.
    static armnn::INetworkPtr Create(const Params& params,
                                     std::vector<armnn::BindingPointInfo>& inputBindings,
                                     std::vector<armnn::BindingPointInfo>& outputBindings)
    {
        // Translate the generic parameters into TfLite parser options.
        IParser::TfLiteParserOptions parserOptions;
        parserOptions.m_AllowExpandedDims          = params.m_AllowExpandedDims;
        parserOptions.m_StandInLayerForUnsupported = params.m_ParseUnsupported;
        parserOptions.m_InferAndValidate           = params.m_InferOutputShape;
        auto tfLiteParser(IParser::Create(parserOptions));

        armnn::INetworkPtr network{nullptr, [](armnn::INetwork*){}};

        {
            ARMNN_SCOPED_HEAP_PROFILING("Parsing");
            // TfLite models are always binary flatbuffers.
            network = tfLiteParser->CreateNetworkFromBinaryFile(params.m_ModelPath.c_str());
        }

        for (const std::string& layerName : params.m_InputBindings)
        {
            inputBindings.push_back(
                tfLiteParser->GetNetworkInputBindingInfo(params.m_SubgraphId, layerName));
        }

        for (const std::string& layerName : params.m_OutputBindings)
        {
            outputBindings.push_back(
                tfLiteParser->GetNetworkOutputBindingInfo(params.m_SubgraphId, layerName));
        }

        return network;
    }
};
#endif
277
#if defined(ARMNN_ONNX_PARSER)
/// Specialization for ONNX models.
template <>
struct CreateNetworkImpl<armnnOnnxParser::IOnnxParser>
{
public:
    using IParser = armnnOnnxParser::IOnnxParser;
    using Params = InferenceModelInternal::Params;
    using BindingPointInfo = InferenceModelInternal::BindingPointInfo;

    /// Parses an ONNX model (binary or text) from the file named in params and
    /// appends the binding info of the requested inputs/outputs.
    static armnn::INetworkPtr Create(const Params& params,
                                     std::vector<BindingPointInfo>& inputBindings,
                                     std::vector<BindingPointInfo>& outputBindings)
    {
        const std::string& modelPath = params.m_ModelPath;
        auto onnxParser(IParser::Create());
        armnn::INetworkPtr network{nullptr, [](armnn::INetwork*){}};

        if (params.m_InputShapes.empty())
        {
            // No shape overrides: let the parser derive all input shapes.
            ARMNN_SCOPED_HEAP_PROFILING("Parsing");
            network = params.m_IsModelBinary
                          ? onnxParser->CreateNetworkFromBinaryFile(modelPath.c_str())
                          : onnxParser->CreateNetworkFromTextFile(modelPath.c_str());
        }
        else
        {
            // Shape overrides supplied: every bound input must have one.
            const size_t shapeCount   = params.m_InputShapes.size();
            const size_t bindingCount = params.m_InputBindings.size();
            if (shapeCount < bindingCount)
            {
                throw armnn::Exception(fmt::format(
                    "Not every input has its tensor shape specified: expected={0}, got={1}",
                    bindingCount, shapeCount));
            }

            std::map<std::string, armnn::TensorShape> inputShapes;
            for (size_t idx = 0; idx < shapeCount; idx++)
            {
                inputShapes[params.m_InputBindings[idx]] = params.m_InputShapes[idx];
            }

            {
                ARMNN_SCOPED_HEAP_PROFILING("Parsing");
                network = params.m_IsModelBinary
                              ? onnxParser->CreateNetworkFromBinaryFile(modelPath.c_str(), inputShapes)
                              : onnxParser->CreateNetworkFromTextFile(modelPath.c_str(), inputShapes);
            }
        }

        for (const std::string& layerName : params.m_InputBindings)
        {
            inputBindings.push_back(onnxParser->GetNetworkInputBindingInfo(layerName));
        }

        for (const std::string& layerName : params.m_OutputBindings)
        {
            outputBindings.push_back(onnxParser->GetNetworkOutputBindingInfo(layerName));
        }

        return network;
    }
};
#endif
telsoa014fcda012018-03-09 14:13:49 +0000347
Aron Virginas-Tar7cf0eaa2019-01-24 17:05:36 +0000348
telsoa014fcda012018-03-09 14:13:49 +0000349
350template <typename IParser, typename TDataType>
351class InferenceModel
352{
353public:
Aron Virginas-Tar7cf0eaa2019-01-24 17:05:36 +0000354 using DataType = TDataType;
355 using Params = InferenceModelInternal::Params;
Aron Virginas-Tar7cf0eaa2019-01-24 17:05:36 +0000356 using QuantizationParams = InferenceModelInternal::QuantizationParams;
David Monahan6bb47a72021-10-22 12:57:28 +0100357
telsoa014fcda012018-03-09 14:13:49 +0000358
359 struct CommandLineOptions
360 {
361 std::string m_ModelDir;
Aron Virginas-Tar339bcae2019-01-31 16:44:26 +0000362 std::vector<std::string> m_ComputeDevices;
Matteo Martincigh00dda4a2019-08-14 11:42:30 +0100363 std::string m_DynamicBackendsPath;
surmeh013537c2c2018-05-18 16:31:43 +0100364 bool m_VisualizePostOptimizationModel;
telsoa01c577f2c2018-08-31 09:22:23 +0100365 bool m_EnableFp16TurboMode;
Narumol Prangnawaratd8cc8112020-03-24 13:54:05 +0000366 bool m_EnableBf16TurboMode;
Pablo Tello507f39d2019-04-15 15:44:39 +0100367 std::string m_Labels;
Aron Virginas-Tar339bcae2019-01-31 16:44:26 +0000368
369 std::vector<armnn::BackendId> GetComputeDevicesAsBackendIds()
370 {
371 std::vector<armnn::BackendId> backendIds;
372 std::copy(m_ComputeDevices.begin(), m_ComputeDevices.end(), std::back_inserter(backendIds));
373 return backendIds;
374 }
telsoa014fcda012018-03-09 14:13:49 +0000375 };
376
James Wardc89829f2020-10-12 14:17:36 +0100377 static void AddCommandLineOptions(cxxopts::Options& options,
378 CommandLineOptions& cLineOptions, std::vector<std::string>& required)
telsoa014fcda012018-03-09 14:13:49 +0000379 {
Aron Virginas-Tar339bcae2019-01-31 16:44:26 +0000380 const std::vector<std::string> defaultComputes = { "CpuAcc", "CpuRef" };
David Beckf0b48452018-10-19 15:20:56 +0100381
Aron Virginas-Tar5cc8e562018-10-23 15:14:46 +0100382 const std::string backendsMessage = "Which device to run layers on by default. Possible choices: "
383 + armnn::BackendRegistryInstance().GetBackendIdsAsString();
384
James Wardc89829f2020-10-12 14:17:36 +0100385 options
386 .allow_unrecognised_options()
387 .add_options()
Nikhil Raj6dd178f2021-04-02 22:04:39 +0100388 ("m,model-dir", "Path to directory containing model files (.prototxt/.tflite)",
James Wardc89829f2020-10-12 14:17:36 +0100389 cxxopts::value<std::string>(cLineOptions.m_ModelDir))
390 ("c,compute", backendsMessage.c_str(),
391 cxxopts::value<std::vector<std::string>>(cLineOptions.m_ComputeDevices)->default_value("CpuRef"))
392 ("b,dynamic-backends-path",
393 "Path where to load any available dynamic backend from. "
394 "If left empty (the default), dynamic backends will not be used.",
395 cxxopts::value(cLineOptions.m_DynamicBackendsPath))
396 ("l,labels",
397 "Text file containing one image filename - correct label pair per line, "
398 "used to test the accuracy of the network.", cxxopts::value<std::string>(cLineOptions.m_Labels))
399 ("v,visualize-optimized-model",
400 "Produce a dot file useful for visualizing the graph post optimization."
401 "The file will have the same name as the model with the .dot extention.",
402 cxxopts::value<bool>(cLineOptions.m_VisualizePostOptimizationModel)->default_value("false"))
403 ("fp16-turbo-mode",
404 "If this option is enabled FP32 layers, weights and biases will be converted "
405 "to FP16 where the backend supports it.",
406 cxxopts::value<bool>(cLineOptions.m_EnableFp16TurboMode)->default_value("false"))
407 ("bf16-turbo-mode",
408 "If this option is enabled FP32 layers, weights and biases will be converted "
409 "to BF16 where the backend supports it.",
410 cxxopts::value<bool>(cLineOptions.m_EnableBf16TurboMode)->default_value("false"));
411
412 required.emplace_back("model-dir");
telsoa014fcda012018-03-09 14:13:49 +0000413 }
414
    /// Parses, optimizes and loads the network described by params.
    /// @param params              Model path, bindings and all tuning options.
    /// @param enableProfiling     Enables the ArmNN profiler for this network.
    /// @param dynamicBackendsPath Where to load dynamic backends from ("" disables).
    /// @param runtime             Optional shared runtime; if null a private one is created.
    /// @throws armnn::Exception if a requested backend id is invalid, if
    ///         optimization returns null, or if IRuntime::LoadNetwork fails.
    InferenceModel(const Params& params,
                   bool enableProfiling,
                   const std::string& dynamicBackendsPath,
                   const std::shared_ptr<armnn::IRuntime>& runtime = nullptr)
        : m_EnableProfiling(enableProfiling),
          m_ProfilingDetailsMethod(armnn::ProfilingDetailsMethod::Undefined),
          m_DynamicBackendsPath(dynamicBackendsPath),
          m_ImportInputsIfAligned(params.m_ImportInputsIfAligned)
    {
        // Reuse the caller-supplied runtime when given; otherwise create one
        // with GPU profiling and the dynamic-backend path configured.
        if (runtime)
        {
            m_Runtime = runtime;
        }
        else
        {
            armnn::IRuntime::CreationOptions options;
            options.m_EnableGpuProfiling = m_EnableProfiling;
            options.m_DynamicBackendsPath = m_DynamicBackendsPath;
            m_Runtime = armnn::IRuntime::Create(options);
        }

        // Configure the profiler detail level when requested.
        // "details only" takes precedence over "details with events".
        if (params.m_OutputDetailsOnlyToStdOut)
            m_ProfilingDetailsMethod = armnn::ProfilingDetailsMethod::DetailsOnly;
        else if (params.m_OutputDetailsToStdOut)
            m_ProfilingDetailsMethod = armnn::ProfilingDetailsMethod::DetailsWithEvents;

        // Fail fast on unknown backend ids before doing any expensive work.
        std::string invalidBackends;
        if (!CheckRequestedBackendsAreValid(params.m_ComputeDevices, armnn::Optional<std::string&>(invalidBackends)))
        {
            throw armnn::Exception("Some backend IDs are invalid: " + invalidBackends);
        }

        armnn::IOptimizedNetworkPtr optNet{nullptr, [](armnn::IOptimizedNetwork*){}};
        {
            // Parse the model file; the parser also fills in
            // m_InputBindings / m_OutputBindings.
            const auto parsing_start_time = armnn::GetTimeNow();
            armnn::INetworkPtr network = CreateNetworkImpl<IParser>::Create(params, m_InputBindings, m_OutputBindings);

            ARMNN_LOG(info) << "Network parsing time: " << std::setprecision(2)
                            << std::fixed << armnn::GetTimeDuration(parsing_start_time).count() << " ms.";

            // Heap profiling covers the remainder of this scope (optimization).
            ARMNN_SCOPED_HEAP_PROFILING("Optimizing");

            // Translate Params into optimizer options.
            armnn::OptimizerOptionsOpaque options;
            options.SetReduceFp32ToFp16(params.m_EnableFp16TurboMode);
            options.SetDebugEnabled(params.m_PrintIntermediateLayers);
            options.SetDebugToFileEnabled(params.m_PrintIntermediateLayersToFile);
            options.SetShapeInferenceMethod(params.m_InferOutputShape ?
                    armnn::ShapeInferenceMethod::InferAndValidate : armnn::ShapeInferenceMethod::ValidateOnly);
            options.SetProfilingEnabled(m_EnableProfiling);

            // Backend-specific options; each backend ignores options that do
            // not apply to it.
            armnn::BackendOptions gpuAcc("GpuAcc",
            {
                { "FastMathEnabled", params.m_EnableFastMath },
                { "SaveCachedNetwork", params.m_SaveCachedNetwork },
                { "CachedNetworkFilePath", params.m_CachedNetworkFilePath },
                { "MLGOTuningFilePath", params.m_MLGOTuningFilePath }
            });

            armnn::BackendOptions cpuAcc("CpuAcc",
            {
                { "FastMathEnabled", params.m_EnableFastMath },
                { "NumberOfThreads", params.m_NumberOfThreads }
            });
            options.AddModelOption(gpuAcc);
            options.AddModelOption(cpuAcc);

            const auto optimization_start_time = armnn::GetTimeNow();
            optNet = armnn::Optimize(*network, params.m_ComputeDevices, m_Runtime->GetDeviceSpec(), options);

            ARMNN_LOG(info) << "Optimization time: " << std::setprecision(2)
                            << std::fixed << armnn::GetTimeDuration(optimization_start_time).count() << " ms.";

            if (!optNet)
            {
                throw armnn::Exception("Optimize returned nullptr");
            }


        }

        // Optionally dump the optimized graph next to the model file,
        // replacing the model's extension with ".dot".
        if (params.m_VisualizePostOptimizationModel)
        {
            fs::path filename = params.m_ModelPath;
            filename.replace_extension("dot");
            std::fstream file(filename.c_str(), std::ios_base::out);
            optNet->SerializeToDot(file);
        }

        armnn::Status ret;
        {
            ARMNN_SCOPED_HEAP_PROFILING("LoadNetwork");

            const auto loading_start_time = armnn::GetTimeNow();
            armnn::INetworkProperties networkProperties(params.m_AsyncEnabled,
                                                        armnn::MemorySource::Undefined,
                                                        armnn::MemorySource::Undefined,
                                                        enableProfiling,
                                                        m_ProfilingDetailsMethod);
            std::string errorMessage;
            ret = m_Runtime->LoadNetwork(m_NetworkIdentifier, std::move(optNet), errorMessage, networkProperties);

            ARMNN_LOG(info) << "Network loading time: " << std::setprecision(2)
                            << std::fixed << armnn::GetTimeDuration(loading_start_time).count() << " ms.";
#if !defined(ARMNN_DISABLE_THREADS)
            // For async execution with an explicit pool size, pre-create one
            // working-memory handle per pool thread and build the threadpool.
            if (params.m_AsyncEnabled && params.m_ThreadPoolSize > 0)
            {
                std::vector<std::shared_ptr<armnn::IWorkingMemHandle>> memHandles;
                for (size_t i = 0; i < params.m_ThreadPoolSize; ++i)
                {
                    memHandles.emplace_back(m_Runtime->CreateWorkingMemHandle(m_NetworkIdentifier));
                }

                m_Threadpool = std::make_unique<armnn::Threadpool>(params.m_ThreadPoolSize,
                                                                   m_Runtime.get(),
                                                                   memHandles);
            }
#endif
        }

        // NOTE(review): errorMessage from LoadNetwork is not included in the
        // exception below — presumably already logged by the runtime; confirm.
        if (ret == armnn::Status::Failure)
        {
            throw armnn::Exception("IRuntime::LoadNetwork failed");
        }
    }
540
Aron Virginas-Tar7cf0eaa2019-01-24 17:05:36 +0000541 void CheckInputIndexIsValid(unsigned int inputIndex) const
telsoa014fcda012018-03-09 14:13:49 +0000542 {
Aron Virginas-Tar7cf0eaa2019-01-24 17:05:36 +0000543 if (m_InputBindings.size() < inputIndex + 1)
544 {
James Ward08f40162020-09-07 16:45:07 +0100545 throw armnn::Exception(fmt::format("Input index out of range: {}", inputIndex));
Aron Virginas-Tar7cf0eaa2019-01-24 17:05:36 +0000546 }
telsoa014fcda012018-03-09 14:13:49 +0000547 }
548
Aron Virginas-Tar7cf0eaa2019-01-24 17:05:36 +0000549 void CheckOutputIndexIsValid(unsigned int outputIndex) const
telsoa014fcda012018-03-09 14:13:49 +0000550 {
Aron Virginas-Tar7cf0eaa2019-01-24 17:05:36 +0000551 if (m_OutputBindings.size() < outputIndex + 1)
552 {
James Ward08f40162020-09-07 16:45:07 +0100553 throw armnn::Exception(fmt::format("Output index out of range: {}", outputIndex));
Aron Virginas-Tar7cf0eaa2019-01-24 17:05:36 +0000554 }
555 }
556
Aron Virginas-Tarc82c8732019-10-24 17:07:43 +0100557 unsigned int GetInputSize(unsigned int inputIndex = 0u) const
558 {
559 CheckInputIndexIsValid(inputIndex);
560 return m_InputBindings[inputIndex].second.GetNumElements();
561 }
562
Aron Virginas-Tar7cf0eaa2019-01-24 17:05:36 +0000563 unsigned int GetOutputSize(unsigned int outputIndex = 0u) const
564 {
565 CheckOutputIndexIsValid(outputIndex);
566 return m_OutputBindings[outputIndex].second.GetNumElements();
567 }
568
    /// Performs one synchronous inference on the loaded network.
    ///
    /// @param inputContainers  One container per network input, in binding order.
    /// @param outputContainers One container per network output; each must already be
    ///                         sized to hold at least GetOutputSize(i) elements.
    /// @return Wall-clock duration of the EnqueueWorkload call, in milliseconds.
    /// @throws armnn::Exception if an output container is too small or the workload fails.
    std::chrono::duration<double, std::milli> Run(
            const std::vector<armnnUtils::TContainer>& inputContainers,
            std::vector<armnnUtils::TContainer>& outputContainers)
    {
        // Validate up front that every output container can hold the data the
        // network will write into it.
        for (unsigned int i = 0; i < outputContainers.size(); ++i)
        {
            const unsigned int expectedOutputDataSize = GetOutputSize(i);

            // TContainer is a variant over concrete vector types; visit it to get
            // the size of whichever alternative is active.
            mapbox::util::apply_visitor([expectedOutputDataSize, i](auto&& value)
            {
                const unsigned int actualOutputDataSize = armnn::numeric_cast<unsigned int>(value.size());
                if (actualOutputDataSize < expectedOutputDataSize)
                {
                    unsigned int outputIndex = i;
                    throw armnn::Exception(
                            fmt::format("Not enough data for output #{0}: expected "
                                        "{1} elements, got {2}", outputIndex, expectedOutputDataSize, actualOutputDataSize));
                }
            },
            outputContainers[i]);
        }

        std::shared_ptr<armnn::IProfiler> profiler = m_Runtime->GetProfiler(m_NetworkIdentifier);

        // Start timer to record inference time in EnqueueWorkload (in milliseconds)
        const auto start_time = armnn::GetTimeNow();

        armnn::Status ret;
        if (m_ImportInputsIfAligned)
        {
            // Pre-import the (malloc-aligned) input/output buffers so the runtime can
            // use them directly, then pass the returned ids alongside the tensors.
            std::vector<armnn::ImportedInputId> importedInputIds = m_Runtime->ImportInputs(
                m_NetworkIdentifier, MakeInputTensors(inputContainers), armnn::MemorySource::Malloc);

            std::vector<armnn::ImportedOutputId> importedOutputIds = m_Runtime->ImportOutputs(
                m_NetworkIdentifier, MakeOutputTensors(outputContainers), armnn::MemorySource::Malloc);

            ret = m_Runtime->EnqueueWorkload(m_NetworkIdentifier,
                                             MakeInputTensors(inputContainers),
                                             MakeOutputTensors(outputContainers),
                                             importedInputIds,
                                             importedOutputIds);
        }
        else
        {
            // Plain path: the runtime copies data in/out of the bound containers.
            ret = m_Runtime->EnqueueWorkload(m_NetworkIdentifier,
                                             MakeInputTensors(inputContainers),
                                             MakeOutputTensors(outputContainers));
        }
        const auto duration = armnn::GetTimeDuration(start_time);

        // if profiling is enabled print out the results
        if (profiler && profiler->IsProfilingEnabled())
        {
            profiler->Print(std::cout);
        }

        if (ret == armnn::Status::Failure)
        {
            throw armnn::Exception("IRuntime::EnqueueWorkload failed");
        }
        else
        {
            return duration;
        }
    }
634
    /// Executes one inference through the experimental asynchronous API using a
    /// caller-provided working-memory handle (see CreateWorkingMemHandle()).
    ///
    /// @param workingMemHandleRef Per-execution working memory owned by the caller.
    /// @param inputContainers     One container per network input, in binding order.
    /// @param outputContainers    One container per network output; each must already be
    ///                            sized to hold at least GetOutputSize(i) elements.
    /// @param inferenceID         Caller-chosen id, echoed back in the returned tuple.
    /// @return Tuple of (inferenceID, wall-clock duration of Execute in milliseconds).
    /// @throws armnn::Exception if an output container is too small or Execute fails.
    std::tuple<unsigned int, std::chrono::duration<double, std::milli>> RunAsync(
        armnn::experimental::IWorkingMemHandle& workingMemHandleRef,
        const std::vector<armnnUtils::TContainer>& inputContainers,
        std::vector<armnnUtils::TContainer>& outputContainers,
        unsigned int inferenceID)
    {
        // Validate that every output container can hold the data the network produces.
        for (unsigned int i = 0; i < outputContainers.size(); ++i)
        {
            const unsigned int expectedOutputDataSize = GetOutputSize(i);

            // TContainer is a variant over concrete vector types; visit it to get
            // the size of whichever alternative is active.
            mapbox::util::apply_visitor([expectedOutputDataSize, i](auto&& value)
            {
                const unsigned int actualOutputDataSize = armnn::numeric_cast<unsigned int>(value.size());
                if (actualOutputDataSize < expectedOutputDataSize)
                {
                    unsigned int outputIndex = i;
                    throw armnn::Exception(
                            fmt::format("Not enough data for output #{0}: expected "
                                        "{1} elements, got {2}", outputIndex, expectedOutputDataSize, actualOutputDataSize));
                }
            },
            outputContainers[i]);
        }

        std::shared_ptr<armnn::IProfiler> profiler = m_Runtime->GetProfiler(m_NetworkIdentifier);

        // Start timer to record inference time in EnqueueWorkload (in milliseconds)
        const auto start_time = armnn::GetTimeNow();

        armnn::Status ret = m_Runtime->Execute(workingMemHandleRef,
                                               MakeInputTensors(inputContainers),
                                               MakeOutputTensors(outputContainers));

        const auto duration = armnn::GetTimeDuration(start_time);

        // if profiling is enabled print out the results
        if (profiler && profiler->IsProfilingEnabled())
        {
            profiler->Print(std::cout);
        }

        if (ret == armnn::Status::Failure)
        {
            throw armnn::Exception(
                fmt::format("IRuntime::Execute asynchronously failed for network #{0} on inference #{1}",
                            m_NetworkIdentifier, inferenceID));
        }
        else
        {
            return std::make_tuple(inferenceID, duration);
        }
    }
687
Francis Murtagh40d27412021-10-28 11:11:35 +0100688 void RunAsync(const std::vector<armnnUtils::TContainer>& inputContainers,
689 std::vector<armnnUtils::TContainer>& outputContainers,
Finn Williamsf364d532021-06-09 17:07:33 +0100690 std::shared_ptr<armnn::IAsyncExecutionCallback> cb)
Kevin Mayb4b3ac92021-05-21 16:42:21 +0100691 {
Ryan OSheab5540542022-07-06 09:52:52 +0100692#if !defined(ARMNN_DISABLE_THREADS)
Kevin Mayb4b3ac92021-05-21 16:42:21 +0100693 for (unsigned int i = 0; i < outputContainers.size(); ++i)
694 {
695 const unsigned int expectedOutputDataSize = GetOutputSize(i);
696
697 mapbox::util::apply_visitor([expectedOutputDataSize, i](auto&& value)
698 {
699 const unsigned int actualOutputDataSize = armnn::numeric_cast<unsigned int>(value.size());
700 if (actualOutputDataSize < expectedOutputDataSize)
701 {
702 unsigned int outputIndex = i;
703 throw armnn::Exception(
704 fmt::format("Not enough data for output #{0}: expected "
705 "{1} elements, got {2}", outputIndex, expectedOutputDataSize, actualOutputDataSize));
706 }
707 },
708 outputContainers[i]);
709 }
710
711 std::shared_ptr<armnn::IProfiler> profiler = m_Runtime->GetProfiler(m_NetworkIdentifier);
Kevin Mayb4b3ac92021-05-21 16:42:21 +0100712
Finn Williamsf364d532021-06-09 17:07:33 +0100713 m_Threadpool->Schedule(m_NetworkIdentifier,
714 MakeInputTensors(inputContainers),
715 MakeOutputTensors(outputContainers),
716 armnn::QosExecPriority::Medium,
717 cb);
Kevin Mayb4b3ac92021-05-21 16:42:21 +0100718
719 // if profiling is enabled print out the results
720 if (profiler && profiler->IsProfilingEnabled())
721 {
722 profiler->Print(std::cout);
723 }
Ryan OSheab5540542022-07-06 09:52:52 +0100724#endif
Kevin Mayb4b3ac92021-05-21 16:42:21 +0100725 }
726
Jim Flynnb4d7eae2019-05-01 14:44:27 +0100727 const armnn::BindingPointInfo& GetInputBindingInfo(unsigned int inputIndex = 0u) const
telsoa01c577f2c2018-08-31 09:22:23 +0100728 {
Aron Virginas-Tar7cf0eaa2019-01-24 17:05:36 +0000729 CheckInputIndexIsValid(inputIndex);
730 return m_InputBindings[inputIndex];
telsoa01c577f2c2018-08-31 09:22:23 +0100731 }
732
Jim Flynnb4d7eae2019-05-01 14:44:27 +0100733 const std::vector<armnn::BindingPointInfo>& GetInputBindingInfos() const
telsoa01c577f2c2018-08-31 09:22:23 +0100734 {
Aron Virginas-Tar7cf0eaa2019-01-24 17:05:36 +0000735 return m_InputBindings;
telsoa01c577f2c2018-08-31 09:22:23 +0100736 }
737
Jim Flynnb4d7eae2019-05-01 14:44:27 +0100738 const armnn::BindingPointInfo& GetOutputBindingInfo(unsigned int outputIndex = 0u) const
telsoa01c577f2c2018-08-31 09:22:23 +0100739 {
Aron Virginas-Tar7cf0eaa2019-01-24 17:05:36 +0000740 CheckOutputIndexIsValid(outputIndex);
741 return m_OutputBindings[outputIndex];
742 }
743
Jim Flynnb4d7eae2019-05-01 14:44:27 +0100744 const std::vector<armnn::BindingPointInfo>& GetOutputBindingInfos() const
Aron Virginas-Tar7cf0eaa2019-01-24 17:05:36 +0000745 {
746 return m_OutputBindings;
747 }
748
749 QuantizationParams GetQuantizationParams(unsigned int outputIndex = 0u) const
750 {
751 CheckOutputIndexIsValid(outputIndex);
752 return std::make_pair(m_OutputBindings[outputIndex].second.GetQuantizationScale(),
753 m_OutputBindings[outputIndex].second.GetQuantizationOffset());
754 }
755
Narumol Prangnawarat4628d052019-02-25 17:26:05 +0000756 QuantizationParams GetInputQuantizationParams(unsigned int inputIndex = 0u) const
757 {
758 CheckInputIndexIsValid(inputIndex);
759 return std::make_pair(m_InputBindings[inputIndex].second.GetQuantizationScale(),
760 m_InputBindings[inputIndex].second.GetQuantizationOffset());
761 }
762
Aron Virginas-Tar7cf0eaa2019-01-24 17:05:36 +0000763 std::vector<QuantizationParams> GetAllQuantizationParams() const
764 {
765 std::vector<QuantizationParams> quantizationParams;
766 for (unsigned int i = 0u; i < m_OutputBindings.size(); i++)
767 {
768 quantizationParams.push_back(GetQuantizationParams(i));
769 }
770 return quantizationParams;
telsoa01c577f2c2018-08-31 09:22:23 +0100771 }
772
Sadik Armagana04a9d72021-04-27 10:02:10 +0100773 std::unique_ptr<armnn::experimental::IWorkingMemHandle> CreateWorkingMemHandle()
774 {
775 return m_Runtime->CreateWorkingMemHandle(m_NetworkIdentifier);
776 }
777
private:
    armnn::NetworkId m_NetworkIdentifier;                 // Id of the network loaded into m_Runtime
    std::shared_ptr<armnn::IRuntime> m_Runtime;           // Runtime that owns and executes the network
#if !defined(ARMNN_DISABLE_THREADS)
    std::unique_ptr<armnn::Threadpool> m_Threadpool;      // Used by the callback-based RunAsync overload
#endif

    std::vector<armnn::BindingPointInfo> m_InputBindings;   // (binding id, TensorInfo) per network input
    std::vector<armnn::BindingPointInfo> m_OutputBindings;  // (binding id, TensorInfo) per network output
    bool m_EnableProfiling;                                 // Whether profiling was requested at load time
    armnn::ProfilingDetailsMethod m_ProfilingDetailsMethod; // Level of per-workload profiling detail
    std::string m_DynamicBackendsPath;                      // Search path for dynamically loaded backends
    bool m_ImportInputsIfAligned;                           // Run() pre-imports aligned buffers when true
telsoa01c577f2c2018-08-31 09:22:23 +0100791
telsoa014fcda012018-03-09 14:13:49 +0000792 template<typename TContainer>
Aron Virginas-Tar7cf0eaa2019-01-24 17:05:36 +0000793 armnn::InputTensors MakeInputTensors(const std::vector<TContainer>& inputDataContainers)
telsoa014fcda012018-03-09 14:13:49 +0000794 {
Jim Flynn2fd61002019-05-03 12:54:26 +0100795 return armnnUtils::MakeInputTensors(m_InputBindings, inputDataContainers);
telsoa014fcda012018-03-09 14:13:49 +0000796 }
797
798 template<typename TContainer>
Aron Virginas-Tar7cf0eaa2019-01-24 17:05:36 +0000799 armnn::OutputTensors MakeOutputTensors(std::vector<TContainer>& outputDataContainers)
telsoa014fcda012018-03-09 14:13:49 +0000800 {
Jim Flynn2fd61002019-05-03 12:54:26 +0100801 return armnnUtils::MakeOutputTensors(m_OutputBindings, outputDataContainers);
telsoa014fcda012018-03-09 14:13:49 +0000802 }
Ferran Balaguerc602f292019-02-08 17:09:55 +0000803};