//
// Copyright © 2022 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#pragma once


#include <armnn/ArmNN.hpp>
#include <armnn/Threadpool.hpp>
#include <armnn/Logging.hpp>
#include <armnn/utility/Timer.hpp>
#include <armnn/BackendRegistry.hpp>
#include <armnn/utility/Assert.hpp>
#include <armnn/utility/NumericCast.hpp>

#include <armnnUtils/TContainer.hpp>
#include "NetworkExecutionUtils/NetworkExecutionUtils.hpp"

#include <common/include/ProfilingGuid.hpp>

#if defined(ARMNN_SERIALIZER)
#include "armnnDeserializer/IDeserializer.hpp"
#endif
#if defined(ARMNN_TF_LITE_PARSER)
#include <armnnTfLiteParser/ITfLiteParser.hpp>
#endif
#if defined(ARMNN_ONNX_PARSER)
#include <armnnOnnxParser/IOnnxParser.hpp>
#endif

#include <armnnUtils/Filesystem.hpp>
#include <HeapProfiling.hpp>
#include <TensorIOUtils.hpp>

#include "armnn/utility/StringUtils.hpp"
#include <cxxopts/cxxopts.hpp>
#include "CxxoptsUtils.hpp"
#include <fmt/format.h>
#include <mapbox/variant.hpp>

#include <algorithm>
#include <iterator>
#include <fstream>
#include <map>
#include <string>
#include <vector>
#include <type_traits>

namespace InferenceModelInternal
{
using BindingPointInfo = armnn::BindingPointInfo;

using QuantizationParams = std::pair<float,int32_t>;

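// Aggregates everything needed to parse, optimize and load a network:
// model location, input/output binding names and shapes, target backends,
// and the optimization and execution flags used by the test harness.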
struct Params
{
    std::string m_ModelPath;
    std::vector<std::string> m_InputBindings;
    std::vector<armnn::TensorShape> m_InputShapes;
    std::vector<std::string> m_OutputBindings;
    std::vector<armnn::BackendId> m_ComputeDevices;
    std::string m_DynamicBackendsPath;
    size_t m_SubgraphId;
    bool m_AllowExpandedDims;
    bool m_IsModelBinary;
    bool m_VisualizePostOptimizationModel;
    bool m_EnableFp16TurboMode;
    bool m_EnableBf16TurboMode;
    bool m_PrintIntermediateLayers;
    bool m_PrintIntermediateLayersToFile;
    bool m_ParseUnsupported;
    bool m_InferOutputShape;
    bool m_EnableFastMath;
    bool m_SaveCachedNetwork;
    bool m_OutputDetailsToStdOut;
    bool m_OutputDetailsOnlyToStdOut;
    std::string m_CachedNetworkFilePath;
    unsigned int m_NumberOfThreads;
    std::string m_MLGOTuningFilePath;
    bool m_AsyncEnabled;
    size_t m_ThreadPoolSize;
    bool m_ImportInputsIfAligned;

    Params()
        : m_ComputeDevices{}
        , m_SubgraphId(0)
        , m_AllowExpandedDims(false)
        , m_IsModelBinary(true)
        , m_VisualizePostOptimizationModel(false)
        , m_EnableFp16TurboMode(false)
        , m_EnableBf16TurboMode(false)
        , m_PrintIntermediateLayers(false)
        , m_PrintIntermediateLayersToFile(false)
        , m_ParseUnsupported(false)
        , m_InferOutputShape(false)
        , m_EnableFastMath(false)
        , m_SaveCachedNetwork(false)
        , m_OutputDetailsToStdOut(false)
        , m_OutputDetailsOnlyToStdOut(false)
        , m_CachedNetworkFilePath("")
        , m_NumberOfThreads(0)
        , m_MLGOTuningFilePath("")
        , m_AsyncEnabled(false)
        , m_ThreadPoolSize(0)
        , m_ImportInputsIfAligned(false)
    {}
};

} // namespace InferenceModelInternal

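// Generic helper that parses a model file with the given IParser and collects the
// input/output binding information. The specialisations below adapt the call pattern
// for the deserializer, TF Lite and ONNX parsers.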
template <typename IParser>
struct CreateNetworkImpl
{
public:
    using Params = InferenceModelInternal::Params;

    static armnn::INetworkPtr Create(const Params& params,
                                     std::vector<armnn::BindingPointInfo>& inputBindings,
                                     std::vector<armnn::BindingPointInfo>& outputBindings)
    {
        const std::string& modelPath = params.m_ModelPath;

        // Create a network from a file on disk
        auto parser(IParser::Create());

        std::map<std::string, armnn::TensorShape> inputShapes;
        if (!params.m_InputShapes.empty())
        {
            const size_t numInputShapes = params.m_InputShapes.size();
            const size_t numInputBindings = params.m_InputBindings.size();
            if (numInputShapes < numInputBindings)
            {
                throw armnn::Exception(fmt::format(
                    "Not every input has its tensor shape specified: expected={0}, got={1}",
                    numInputBindings, numInputShapes));
            }

            for (size_t i = 0; i < numInputShapes; i++)
            {
                inputShapes[params.m_InputBindings[i]] = params.m_InputShapes[i];
            }
        }

        std::vector<std::string> requestedOutputs = params.m_OutputBindings;
        armnn::INetworkPtr network{nullptr, [](armnn::INetwork *){}};

        {
            ARMNN_SCOPED_HEAP_PROFILING("Parsing");
            // Handle text and binary input differently by calling the corresponding parser function
            network = (params.m_IsModelBinary ?
                parser->CreateNetworkFromBinaryFile(modelPath.c_str(), inputShapes, requestedOutputs) :
                parser->CreateNetworkFromTextFile(modelPath.c_str(), inputShapes, requestedOutputs));
        }

        for (const std::string& inputLayerName : params.m_InputBindings)
        {
            inputBindings.push_back(parser->GetNetworkInputBindingInfo(inputLayerName));
        }

        for (const std::string& outputLayerName : params.m_OutputBindings)
        {
            outputBindings.push_back(parser->GetNetworkOutputBindingInfo(outputLayerName));
        }

        return network;
    }
};

#if defined(ARMNN_SERIALIZER)
template <>
struct CreateNetworkImpl<armnnDeserializer::IDeserializer>
{
public:
    using IParser = armnnDeserializer::IDeserializer;
    using Params = InferenceModelInternal::Params;

    static armnn::INetworkPtr Create(const Params& params,
                                     std::vector<armnn::BindingPointInfo>& inputBindings,
                                     std::vector<armnn::BindingPointInfo>& outputBindings)
    {
        auto parser(IParser::Create());
        ARMNN_ASSERT(parser);

        armnn::INetworkPtr network{nullptr, [](armnn::INetwork *){}};

        {
            ARMNN_SCOPED_HEAP_PROFILING("Parsing");

            std::error_code errorCode;
            fs::path pathToFile(params.m_ModelPath);
            if (!fs::exists(pathToFile, errorCode))
            {
                throw armnn::FileNotFoundException(fmt::format("Cannot find the file ({0}) errorCode: {1} {2}",
                                                                params.m_ModelPath,
                                                                errorCode.message(),
                                                                CHECK_LOCATION().AsString()));
            }
            std::ifstream file(params.m_ModelPath, std::ios::binary);

            network = parser->CreateNetworkFromBinary(file);
        }

        unsigned int subgraphId = armnn::numeric_cast<unsigned int>(params.m_SubgraphId);

        for (const std::string& inputLayerName : params.m_InputBindings)
        {
            armnnDeserializer::BindingPointInfo inputBinding =
                parser->GetNetworkInputBindingInfo(subgraphId, inputLayerName);
            inputBindings.push_back(std::make_pair(inputBinding.m_BindingId, inputBinding.m_TensorInfo));
        }

        for (const std::string& outputLayerName : params.m_OutputBindings)
        {
            armnnDeserializer::BindingPointInfo outputBinding =
                parser->GetNetworkOutputBindingInfo(subgraphId, outputLayerName);
            outputBindings.push_back(std::make_pair(outputBinding.m_BindingId, outputBinding.m_TensorInfo));
        }

        return network;
    }
};
#endif

#if defined(ARMNN_TF_LITE_PARSER)
template <>
struct CreateNetworkImpl<armnnTfLiteParser::ITfLiteParser>
{
public:
    using IParser = armnnTfLiteParser::ITfLiteParser;
    using Params = InferenceModelInternal::Params;

    static armnn::INetworkPtr Create(const Params& params,
                                     std::vector<armnn::BindingPointInfo>& inputBindings,
                                     std::vector<armnn::BindingPointInfo>& outputBindings)
    {
        const std::string& modelPath = params.m_ModelPath;

        // Create a network from a file on disk
        IParser::TfLiteParserOptions options;
        options.m_AllowExpandedDims = params.m_AllowExpandedDims;
        options.m_StandInLayerForUnsupported = params.m_ParseUnsupported;
        options.m_InferAndValidate = params.m_InferOutputShape;
        auto parser(IParser::Create(options));

        armnn::INetworkPtr network{nullptr, [](armnn::INetwork *){}};

        {
            ARMNN_SCOPED_HEAP_PROFILING("Parsing");
            network = parser->CreateNetworkFromBinaryFile(modelPath.c_str());
        }

        for (const std::string& inputLayerName : params.m_InputBindings)
        {
            armnn::BindingPointInfo inputBinding =
                parser->GetNetworkInputBindingInfo(params.m_SubgraphId, inputLayerName);
            inputBindings.push_back(inputBinding);
        }

        for (const std::string& outputLayerName : params.m_OutputBindings)
        {
            armnn::BindingPointInfo outputBinding =
                parser->GetNetworkOutputBindingInfo(params.m_SubgraphId, outputLayerName);
            outputBindings.push_back(outputBinding);
        }

        return network;
    }
};
#endif

#if defined(ARMNN_ONNX_PARSER)
template <>
struct CreateNetworkImpl<armnnOnnxParser::IOnnxParser>
{
public:
    using IParser = armnnOnnxParser::IOnnxParser;
    using Params = InferenceModelInternal::Params;
    using BindingPointInfo = InferenceModelInternal::BindingPointInfo;

    static armnn::INetworkPtr Create(const Params& params,
                                     std::vector<BindingPointInfo>& inputBindings,
                                     std::vector<BindingPointInfo>& outputBindings)
    {
        const std::string& modelPath = params.m_ModelPath;

        // Create a network from a file on disk
        auto parser(IParser::Create());

        armnn::INetworkPtr network{nullptr, [](armnn::INetwork *){}};

        std::map<std::string, armnn::TensorShape> inputShapes;
        if (!params.m_InputShapes.empty())
        {
            const size_t numInputShapes = params.m_InputShapes.size();
            const size_t numInputBindings = params.m_InputBindings.size();
            if (numInputShapes < numInputBindings)
            {
                throw armnn::Exception(fmt::format(
                    "Not every input has its tensor shape specified: expected={0}, got={1}",
                    numInputBindings, numInputShapes));
            }

            for (size_t i = 0; i < numInputShapes; i++)
            {
                inputShapes[params.m_InputBindings[i]] = params.m_InputShapes[i];
            }

            {
                ARMNN_SCOPED_HEAP_PROFILING("Parsing");
                network = (params.m_IsModelBinary ?
                    parser->CreateNetworkFromBinaryFile(modelPath.c_str(), inputShapes) :
                    parser->CreateNetworkFromTextFile(modelPath.c_str(), inputShapes));
            }
        }
        else
        {
            ARMNN_SCOPED_HEAP_PROFILING("Parsing");
            network = (params.m_IsModelBinary ?
                parser->CreateNetworkFromBinaryFile(modelPath.c_str()) :
                parser->CreateNetworkFromTextFile(modelPath.c_str()));
        }

        for (const std::string& inputLayerName : params.m_InputBindings)
        {
            BindingPointInfo inputBinding = parser->GetNetworkInputBindingInfo(inputLayerName);
            inputBindings.push_back(inputBinding);
        }

        for (const std::string& outputLayerName : params.m_OutputBindings)
        {
            BindingPointInfo outputBinding = parser->GetNetworkOutputBindingInfo(outputLayerName);
            outputBindings.push_back(outputBinding);
        }

        return network;
    }
};
#endif

template <typename IParser, typename TDataType>
class InferenceModel
{
public:
    using DataType = TDataType;
    using Params = InferenceModelInternal::Params;
    using QuantizationParams = InferenceModelInternal::QuantizationParams;

    struct CommandLineOptions
    {
        std::string m_ModelDir;
        std::vector<std::string> m_ComputeDevices;
        std::string m_DynamicBackendsPath;
        bool m_VisualizePostOptimizationModel;
        bool m_EnableFp16TurboMode;
        bool m_EnableBf16TurboMode;
        std::string m_Labels;

        std::vector<armnn::BackendId> GetComputeDevicesAsBackendIds()
        {
            std::vector<armnn::BackendId> backendIds;
            std::copy(m_ComputeDevices.begin(), m_ComputeDevices.end(), std::back_inserter(backendIds));
            return backendIds;
        }
    };

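    // Registers the command line options understood by this model (model directory, compute
    // devices, dynamic backend path, labels file and the FP16/BF16 turbo modes) with the
    // given cxxopts parser and marks "model-dir" as required.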
    static void AddCommandLineOptions(cxxopts::Options& options,
                                      CommandLineOptions& cLineOptions, std::vector<std::string>& required)
    {
        const std::vector<std::string> defaultComputes = { "CpuAcc", "CpuRef" };

        const std::string backendsMessage = "Which device to run layers on by default. Possible choices: "
                                          + armnn::BackendRegistryInstance().GetBackendIdsAsString();

        options
            .allow_unrecognised_options()
            .add_options()
            ("m,model-dir", "Path to directory containing model files (.prototxt/.tflite)",
             cxxopts::value<std::string>(cLineOptions.m_ModelDir))
            ("c,compute", backendsMessage.c_str(),
             cxxopts::value<std::vector<std::string>>(cLineOptions.m_ComputeDevices)->default_value("CpuRef"))
387 ("b,dynamic-backends-path",
388 "Path where to load any available dynamic backend from. "
389 "If left empty (the default), dynamic backends will not be used.",
390 cxxopts::value(cLineOptions.m_DynamicBackendsPath))
391 ("l,labels",
392 "Text file containing one image filename - correct label pair per line, "
393 "used to test the accuracy of the network.", cxxopts::value<std::string>(cLineOptions.m_Labels))
394 ("v,visualize-optimized-model",
395 "Produce a dot file useful for visualizing the graph post optimization."
396 "The file will have the same name as the model with the .dot extention.",
397 cxxopts::value<bool>(cLineOptions.m_VisualizePostOptimizationModel)->default_value("false"))
398 ("fp16-turbo-mode",
399 "If this option is enabled FP32 layers, weights and biases will be converted "
400 "to FP16 where the backend supports it.",
401 cxxopts::value<bool>(cLineOptions.m_EnableFp16TurboMode)->default_value("false"))
402 ("bf16-turbo-mode",
403 "If this option is enabled FP32 layers, weights and biases will be converted "
404 "to BF16 where the backend supports it.",
405 cxxopts::value<bool>(cLineOptions.m_EnableBf16TurboMode)->default_value("false"));
406
407 required.emplace_back("model-dir");
telsoa014fcda012018-03-09 14:13:49 +0000408 }
409
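    // Creates (or reuses) a runtime, parses the model via CreateNetworkImpl<IParser>,
    // optimizes it for the requested backends and loads it into the runtime. When
    // asynchronous execution with a thread pool is requested, a Threadpool with the
    // configured number of working memory handles is also created.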
    InferenceModel(const Params& params,
                   bool enableProfiling,
                   const std::string& dynamicBackendsPath,
                   const std::shared_ptr<armnn::IRuntime>& runtime = nullptr)
        : m_EnableProfiling(enableProfiling),
          m_ProfilingDetailsMethod(armnn::ProfilingDetailsMethod::Undefined),
          m_DynamicBackendsPath(dynamicBackendsPath),
          m_ImportInputsIfAligned(params.m_ImportInputsIfAligned)
    {
        if (runtime)
        {
            m_Runtime = runtime;
        }
        else
        {
            armnn::IRuntime::CreationOptions options;
            options.m_EnableGpuProfiling = m_EnableProfiling;
            options.m_DynamicBackendsPath = m_DynamicBackendsPath;
            m_Runtime = armnn::IRuntime::Create(options);
        }

        // Configure the profiler if profiling details have been requested
        if (params.m_OutputDetailsOnlyToStdOut)
            m_ProfilingDetailsMethod = armnn::ProfilingDetailsMethod::DetailsOnly;
        else if (params.m_OutputDetailsToStdOut)
            m_ProfilingDetailsMethod = armnn::ProfilingDetailsMethod::DetailsWithEvents;

        std::string invalidBackends;
        if (!CheckRequestedBackendsAreValid(params.m_ComputeDevices, armnn::Optional<std::string&>(invalidBackends)))
        {
            throw armnn::Exception("Some backend IDs are invalid: " + invalidBackends);
        }

        armnn::IOptimizedNetworkPtr optNet{nullptr, [](armnn::IOptimizedNetwork*){}};
        {
            const auto parsing_start_time = armnn::GetTimeNow();
            armnn::INetworkPtr network = CreateNetworkImpl<IParser>::Create(params, m_InputBindings, m_OutputBindings);

            ARMNN_LOG(info) << "Network parsing time: " << std::setprecision(2)
                            << std::fixed << armnn::GetTimeDuration(parsing_start_time).count() << " ms.";

            ARMNN_SCOPED_HEAP_PROFILING("Optimizing");

            armnn::OptimizerOptions options;
            options.m_ReduceFp32ToFp16 = params.m_EnableFp16TurboMode;
            options.m_ReduceFp32ToBf16 = params.m_EnableBf16TurboMode;
            options.m_Debug = params.m_PrintIntermediateLayers;
            options.m_DebugToFile = params.m_PrintIntermediateLayersToFile;
            options.m_shapeInferenceMethod = params.m_InferOutputShape ?
                    armnn::ShapeInferenceMethod::InferAndValidate : armnn::ShapeInferenceMethod::ValidateOnly;
            options.m_ProfilingEnabled = m_EnableProfiling;

            armnn::BackendOptions gpuAcc("GpuAcc",
            {
                { "FastMathEnabled", params.m_EnableFastMath },
                { "SaveCachedNetwork", params.m_SaveCachedNetwork },
                { "CachedNetworkFilePath", params.m_CachedNetworkFilePath },
                { "MLGOTuningFilePath", params.m_MLGOTuningFilePath }
            });

            armnn::BackendOptions cpuAcc("CpuAcc",
            {
                { "FastMathEnabled", params.m_EnableFastMath },
                { "NumberOfThreads", params.m_NumberOfThreads }
            });
            options.m_ModelOptions.push_back(gpuAcc);
            options.m_ModelOptions.push_back(cpuAcc);

            const auto optimization_start_time = armnn::GetTimeNow();
            optNet = armnn::Optimize(*network, params.m_ComputeDevices, m_Runtime->GetDeviceSpec(), options);

            ARMNN_LOG(info) << "Optimization time: " << std::setprecision(2)
                            << std::fixed << armnn::GetTimeDuration(optimization_start_time).count() << " ms.";

            if (!optNet)
            {
                throw armnn::Exception("Optimize returned nullptr");
            }
        }

        if (params.m_VisualizePostOptimizationModel)
        {
            fs::path filename = params.m_ModelPath;
            filename.replace_extension("dot");
            std::fstream file(filename.c_str(), std::ios_base::out);
            optNet->SerializeToDot(file);
        }

        armnn::Status ret;
        {
            ARMNN_SCOPED_HEAP_PROFILING("LoadNetwork");

            const auto loading_start_time = armnn::GetTimeNow();
            armnn::INetworkProperties networkProperties(params.m_AsyncEnabled,
                                                        armnn::MemorySource::Undefined,
                                                        armnn::MemorySource::Undefined,
                                                        enableProfiling,
                                                        m_ProfilingDetailsMethod);
            std::string errorMessage;
            ret = m_Runtime->LoadNetwork(m_NetworkIdentifier, std::move(optNet), errorMessage, networkProperties);

            ARMNN_LOG(info) << "Network loading time: " << std::setprecision(2)
                            << std::fixed << armnn::GetTimeDuration(loading_start_time).count() << " ms.";

            if (params.m_AsyncEnabled && params.m_ThreadPoolSize > 0)
            {
                std::vector<std::shared_ptr<armnn::IWorkingMemHandle>> memHandles;
                for (size_t i = 0; i < params.m_ThreadPoolSize; ++i)
                {
                    memHandles.emplace_back(m_Runtime->CreateWorkingMemHandle(m_NetworkIdentifier));
                }

                m_Threadpool = std::make_unique<armnn::Threadpool>(params.m_ThreadPoolSize,
                                                                   m_Runtime.get(),
                                                                   memHandles);
            }
        }

        if (ret == armnn::Status::Failure)
        {
            throw armnn::Exception("IRuntime::LoadNetwork failed");
        }
    }

    void CheckInputIndexIsValid(unsigned int inputIndex) const
    {
        if (m_InputBindings.size() < inputIndex + 1)
        {
            throw armnn::Exception(fmt::format("Input index out of range: {}", inputIndex));
        }
    }

    void CheckOutputIndexIsValid(unsigned int outputIndex) const
    {
        if (m_OutputBindings.size() < outputIndex + 1)
        {
            throw armnn::Exception(fmt::format("Output index out of range: {}", outputIndex));
        }
    }

    unsigned int GetInputSize(unsigned int inputIndex = 0u) const
    {
        CheckInputIndexIsValid(inputIndex);
        return m_InputBindings[inputIndex].second.GetNumElements();
    }

    unsigned int GetOutputSize(unsigned int outputIndex = 0u) const
    {
        CheckOutputIndexIsValid(outputIndex);
        return m_OutputBindings[outputIndex].second.GetNumElements();
    }

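    // Runs a single synchronous inference. Output containers are checked to be large enough,
    // inputs/outputs are optionally imported when pre-importing is enabled, and the time
    // spent in EnqueueWorkload is returned in milliseconds.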
    std::chrono::duration<double, std::milli> Run(
            const std::vector<armnnUtils::TContainer>& inputContainers,
            std::vector<armnnUtils::TContainer>& outputContainers)
    {
        for (unsigned int i = 0; i < outputContainers.size(); ++i)
        {
            const unsigned int expectedOutputDataSize = GetOutputSize(i);

            mapbox::util::apply_visitor([expectedOutputDataSize, i](auto&& value)
            {
                const unsigned int actualOutputDataSize = armnn::numeric_cast<unsigned int>(value.size());
                if (actualOutputDataSize < expectedOutputDataSize)
                {
                    unsigned int outputIndex = i;
                    throw armnn::Exception(
                            fmt::format("Not enough data for output #{0}: expected "
                                        "{1} elements, got {2}", outputIndex, expectedOutputDataSize,
                                        actualOutputDataSize));
                }
            },
            outputContainers[i]);
        }

        std::shared_ptr<armnn::IProfiler> profiler = m_Runtime->GetProfiler(m_NetworkIdentifier);

        // Start timer to record inference time in EnqueueWorkload (in milliseconds)
        const auto start_time = armnn::GetTimeNow();

        armnn::Status ret;
        if (m_ImportInputsIfAligned)
        {
            std::vector<armnn::ImportedInputId> importedInputIds = m_Runtime->ImportInputs(
                m_NetworkIdentifier, MakeInputTensors(inputContainers), armnn::MemorySource::Malloc);

            std::vector<armnn::ImportedOutputId> importedOutputIds = m_Runtime->ImportOutputs(
                m_NetworkIdentifier, MakeOutputTensors(outputContainers), armnn::MemorySource::Malloc);

            ret = m_Runtime->EnqueueWorkload(m_NetworkIdentifier,
                                             MakeInputTensors(inputContainers),
                                             MakeOutputTensors(outputContainers),
                                             importedInputIds,
                                             importedOutputIds);
        }
        else
        {
            ret = m_Runtime->EnqueueWorkload(m_NetworkIdentifier,
                                             MakeInputTensors(inputContainers),
                                             MakeOutputTensors(outputContainers));
        }
        const auto duration = armnn::GetTimeDuration(start_time);

        // if profiling is enabled print out the results
        if (profiler && profiler->IsProfilingEnabled())
        {
            profiler->Print(std::cout);
        }

        if (ret == armnn::Status::Failure)
        {
            throw armnn::Exception("IRuntime::EnqueueWorkload failed");
        }
        else
        {
            return duration;
        }
    }

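    // Runs one inference asynchronously on the supplied working memory handle and returns
    // the inference ID together with the measured execution time in milliseconds.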
    std::tuple<unsigned int, std::chrono::duration<double, std::milli>> RunAsync(
        armnn::experimental::IWorkingMemHandle& workingMemHandleRef,
        const std::vector<armnnUtils::TContainer>& inputContainers,
        std::vector<armnnUtils::TContainer>& outputContainers,
        unsigned int inferenceID)
    {
        for (unsigned int i = 0; i < outputContainers.size(); ++i)
        {
            const unsigned int expectedOutputDataSize = GetOutputSize(i);

            mapbox::util::apply_visitor([expectedOutputDataSize, i](auto&& value)
            {
                const unsigned int actualOutputDataSize = armnn::numeric_cast<unsigned int>(value.size());
                if (actualOutputDataSize < expectedOutputDataSize)
                {
                    unsigned int outputIndex = i;
                    throw armnn::Exception(
                            fmt::format("Not enough data for output #{0}: expected "
                                        "{1} elements, got {2}", outputIndex, expectedOutputDataSize,
                                        actualOutputDataSize));
                }
            },
            outputContainers[i]);
        }

        std::shared_ptr<armnn::IProfiler> profiler = m_Runtime->GetProfiler(m_NetworkIdentifier);

        // Start timer to record inference time of Execute (in milliseconds)
        const auto start_time = armnn::GetTimeNow();

        armnn::Status ret = m_Runtime->Execute(workingMemHandleRef,
                                               MakeInputTensors(inputContainers),
                                               MakeOutputTensors(outputContainers));

        const auto duration = armnn::GetTimeDuration(start_time);

        // if profiling is enabled print out the results
        if (profiler && profiler->IsProfilingEnabled())
        {
            profiler->Print(std::cout);
        }

        if (ret == armnn::Status::Failure)
        {
            throw armnn::Exception(
                fmt::format("IRuntime::Execute asynchronously failed for network #{0} on inference #{1}",
                            m_NetworkIdentifier, inferenceID));
        }
        else
        {
            return std::make_tuple(inferenceID, duration);
        }
    }

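    // Schedules an inference on the internal thread pool; the supplied callback is notified
    // on completion. Requires the network to have been loaded with a thread pool
    // (m_AsyncEnabled and a non-zero m_ThreadPoolSize).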
    void RunAsync(const std::vector<armnnUtils::TContainer>& inputContainers,
                  std::vector<armnnUtils::TContainer>& outputContainers,
                  std::shared_ptr<armnn::IAsyncExecutionCallback> cb)
    {
        for (unsigned int i = 0; i < outputContainers.size(); ++i)
        {
            const unsigned int expectedOutputDataSize = GetOutputSize(i);

            mapbox::util::apply_visitor([expectedOutputDataSize, i](auto&& value)
            {
                const unsigned int actualOutputDataSize = armnn::numeric_cast<unsigned int>(value.size());
                if (actualOutputDataSize < expectedOutputDataSize)
                {
                    unsigned int outputIndex = i;
                    throw armnn::Exception(
                            fmt::format("Not enough data for output #{0}: expected "
                                        "{1} elements, got {2}", outputIndex, expectedOutputDataSize,
                                        actualOutputDataSize));
                }
            },
            outputContainers[i]);
        }

        std::shared_ptr<armnn::IProfiler> profiler = m_Runtime->GetProfiler(m_NetworkIdentifier);

        m_Threadpool->Schedule(m_NetworkIdentifier,
                               MakeInputTensors(inputContainers),
                               MakeOutputTensors(outputContainers),
                               armnn::QosExecPriority::Medium,
                               cb);

        // if profiling is enabled print out the results
        if (profiler && profiler->IsProfilingEnabled())
        {
            profiler->Print(std::cout);
        }
    }

    const armnn::BindingPointInfo& GetInputBindingInfo(unsigned int inputIndex = 0u) const
    {
        CheckInputIndexIsValid(inputIndex);
        return m_InputBindings[inputIndex];
    }

    const std::vector<armnn::BindingPointInfo>& GetInputBindingInfos() const
    {
        return m_InputBindings;
    }

    const armnn::BindingPointInfo& GetOutputBindingInfo(unsigned int outputIndex = 0u) const
    {
        CheckOutputIndexIsValid(outputIndex);
        return m_OutputBindings[outputIndex];
    }

    const std::vector<armnn::BindingPointInfo>& GetOutputBindingInfos() const
    {
        return m_OutputBindings;
    }

    QuantizationParams GetQuantizationParams(unsigned int outputIndex = 0u) const
    {
        CheckOutputIndexIsValid(outputIndex);
        return std::make_pair(m_OutputBindings[outputIndex].second.GetQuantizationScale(),
                              m_OutputBindings[outputIndex].second.GetQuantizationOffset());
    }

    QuantizationParams GetInputQuantizationParams(unsigned int inputIndex = 0u) const
    {
        CheckInputIndexIsValid(inputIndex);
        return std::make_pair(m_InputBindings[inputIndex].second.GetQuantizationScale(),
                              m_InputBindings[inputIndex].second.GetQuantizationOffset());
    }

    std::vector<QuantizationParams> GetAllQuantizationParams() const
    {
        std::vector<QuantizationParams> quantizationParams;
        for (unsigned int i = 0u; i < m_OutputBindings.size(); i++)
        {
            quantizationParams.push_back(GetQuantizationParams(i));
        }
        return quantizationParams;
    }

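    // Creates a working memory handle for use with the asynchronous Execute path.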
    std::unique_ptr<armnn::experimental::IWorkingMemHandle> CreateWorkingMemHandle()
    {
        return m_Runtime->CreateWorkingMemHandle(m_NetworkIdentifier);
    }

private:
    armnn::NetworkId m_NetworkIdentifier;
    std::shared_ptr<armnn::IRuntime> m_Runtime;
    std::unique_ptr<armnn::Threadpool> m_Threadpool;

    std::vector<armnn::BindingPointInfo> m_InputBindings;
    std::vector<armnn::BindingPointInfo> m_OutputBindings;
    bool m_EnableProfiling;
    armnn::ProfilingDetailsMethod m_ProfilingDetailsMethod;
    std::string m_DynamicBackendsPath;
    bool m_ImportInputsIfAligned;

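    // Helpers binding the data containers to the network's input and output tensors.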
    template<typename TContainer>
    armnn::InputTensors MakeInputTensors(const std::vector<TContainer>& inputDataContainers)
    {
        return armnnUtils::MakeInputTensors(m_InputBindings, inputDataContainers);
    }

    template<typename TContainer>
    armnn::OutputTensors MakeOutputTensors(std::vector<TContainer>& outputDataContainers)
    {
        return armnnUtils::MakeOutputTensors(m_OutputBindings, outputDataContainers);
    }
};