//
// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#pragma once

#include <armnn/ArmNN.hpp>
#include <armnn/Threadpool.hpp>
#include <armnn/Logging.hpp>
#include <armnn/utility/Timer.hpp>
#include <armnn/BackendRegistry.hpp>
#include <armnn/utility/Assert.hpp>
#include <armnn/utility/NumericCast.hpp>

#include <armnnUtils/TContainer.hpp>

#include <common/include/ProfilingGuid.hpp>

#if defined(ARMNN_SERIALIZER)
#include "armnnDeserializer/IDeserializer.hpp"
#endif
#if defined(ARMNN_TF_LITE_PARSER)
#include <armnnTfLiteParser/ITfLiteParser.hpp>
#endif
#if defined(ARMNN_ONNX_PARSER)
#include <armnnOnnxParser/IOnnxParser.hpp>
#endif

#include <armnnUtils/Filesystem.hpp>
#include <HeapProfiling.hpp>
#include <TensorIOUtils.hpp>

#include "armnn/utility/StringUtils.hpp"
#include <cxxopts/cxxopts.hpp>
#include "CxxoptsUtils.hpp"
#include <fmt/format.h>
#include <mapbox/variant.hpp>

#include <algorithm>
#include <iterator>
#include <fstream>
#include <map>
#include <string>
#include <vector>
#include <type_traits>

namespace
{

inline bool CheckRequestedBackendsAreValid(const std::vector<armnn::BackendId>& backendIds,
                                           armnn::Optional<std::string&> invalidBackendIds = armnn::EmptyOptional())
{
    if (backendIds.empty())
    {
        return false;
    }

    armnn::BackendIdSet validBackendIds = armnn::BackendRegistryInstance().GetBackendIds();

    bool allValid = true;
    for (const auto& backendId : backendIds)
    {
        if (std::find(validBackendIds.begin(), validBackendIds.end(), backendId) == validBackendIds.end())
        {
            allValid = false;
            if (invalidBackendIds)
            {
                if (!invalidBackendIds.value().empty())
                {
                    invalidBackendIds.value() += ", ";
                }
                invalidBackendIds.value() += backendId;
            }
        }
    }
    return allValid;
}

} // anonymous namespace
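
// Example (sketch): validating a requested backend list up front, mirroring the
// check done in the InferenceModel constructor below. The backend names are
// illustrative; any id registered with armnn::BackendRegistryInstance() passes.
//
//     std::vector<armnn::BackendId> backends = { "CpuAcc", "GpuAcc", "CpuRef" };
//     std::string invalidBackends;
//     if (!CheckRequestedBackendsAreValid(backends, armnn::Optional<std::string&>(invalidBackends)))
//     {
//         ARMNN_LOG(error) << "Invalid backend IDs: " << invalidBackends;
//     }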

namespace InferenceModelInternal
{
using BindingPointInfo = armnn::BindingPointInfo;

using QuantizationParams = std::pair<float, int32_t>;

struct Params
{
    std::string                     m_ModelPath;
    std::vector<std::string>        m_InputBindings;
    std::vector<armnn::TensorShape> m_InputShapes;
    std::vector<std::string>        m_OutputBindings;
    std::vector<armnn::BackendId>   m_ComputeDevices;
    std::string                     m_DynamicBackendsPath;
    size_t                          m_SubgraphId;
    bool                            m_AllowExpandedDims;
    bool                            m_IsModelBinary;
    bool                            m_VisualizePostOptimizationModel;
    bool                            m_EnableFp16TurboMode;
    bool                            m_EnableBf16TurboMode;
    bool                            m_PrintIntermediateLayers;
    bool                            m_ParseUnsupported;
    bool                            m_InferOutputShape;
    bool                            m_EnableFastMath;
    bool                            m_SaveCachedNetwork;
    bool                            m_OutputDetailsToStdOut;
    bool                            m_OutputDetailsOnlyToStdOut;
    std::string                     m_CachedNetworkFilePath;
    unsigned int                    m_NumberOfThreads;
    std::string                     m_MLGOTuningFilePath;
    bool                            m_AsyncEnabled;
    size_t                          m_ThreadPoolSize;
    bool                            m_ImportInputsIfAligned;

    Params()
        : m_ComputeDevices{}
        , m_SubgraphId(0)
        , m_AllowExpandedDims(false)
        , m_IsModelBinary(true)
        , m_VisualizePostOptimizationModel(false)
        , m_EnableFp16TurboMode(false)
        , m_EnableBf16TurboMode(false)
        , m_PrintIntermediateLayers(false)
        , m_ParseUnsupported(false)
        , m_InferOutputShape(false)
        , m_EnableFastMath(false)
        , m_SaveCachedNetwork(false)
        , m_OutputDetailsToStdOut(false)
        , m_OutputDetailsOnlyToStdOut(false)
        , m_CachedNetworkFilePath("")
        , m_NumberOfThreads(0)
        , m_MLGOTuningFilePath("")
        , m_AsyncEnabled(false)
        , m_ThreadPoolSize(0)
        , m_ImportInputsIfAligned(false)
    {}
};

} // namespace InferenceModelInternal
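
// Example (sketch): populating Params for a binary TfLite model. The model path,
// binding names and backend list are placeholders; any field not set here keeps
// the default established in the Params constructor above.
//
//     InferenceModelInternal::Params params;
//     params.m_ModelPath      = "model.tflite";       // hypothetical file
//     params.m_InputBindings  = { "input" };          // hypothetical tensor name
//     params.m_OutputBindings = { "output" };         // hypothetical tensor name
//     params.m_ComputeDevices = { "CpuAcc", "CpuRef" };
//     params.m_IsModelBinary  = true;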

template <typename IParser>
struct CreateNetworkImpl
{
public:
    using Params = InferenceModelInternal::Params;

    static armnn::INetworkPtr Create(const Params& params,
                                     std::vector<armnn::BindingPointInfo>& inputBindings,
                                     std::vector<armnn::BindingPointInfo>& outputBindings)
    {
        const std::string& modelPath = params.m_ModelPath;

        // Create a network from a file on disk
        auto parser(IParser::Create());

        std::map<std::string, armnn::TensorShape> inputShapes;
        if (!params.m_InputShapes.empty())
        {
            const size_t numInputShapes   = params.m_InputShapes.size();
            const size_t numInputBindings = params.m_InputBindings.size();
            if (numInputShapes < numInputBindings)
            {
                throw armnn::Exception(fmt::format(
                    "Not every input has its tensor shape specified: expected={0}, got={1}",
                    numInputBindings, numInputShapes));
            }

            for (size_t i = 0; i < numInputShapes; i++)
            {
                inputShapes[params.m_InputBindings[i]] = params.m_InputShapes[i];
            }
        }

        std::vector<std::string> requestedOutputs = params.m_OutputBindings;
        armnn::INetworkPtr network{nullptr, [](armnn::INetwork*){}};

        {
            ARMNN_SCOPED_HEAP_PROFILING("Parsing");
            // Handle text and binary input differently by calling the corresponding parser function
            network = (params.m_IsModelBinary ?
                parser->CreateNetworkFromBinaryFile(modelPath.c_str(), inputShapes, requestedOutputs) :
                parser->CreateNetworkFromTextFile(modelPath.c_str(), inputShapes, requestedOutputs));
        }

        for (const std::string& inputLayerName : params.m_InputBindings)
        {
            inputBindings.push_back(parser->GetNetworkInputBindingInfo(inputLayerName));
        }

        for (const std::string& outputLayerName : params.m_OutputBindings)
        {
            outputBindings.push_back(parser->GetNetworkOutputBindingInfo(outputLayerName));
        }

        return network;
    }
};

#if defined(ARMNN_SERIALIZER)
template <>
struct CreateNetworkImpl<armnnDeserializer::IDeserializer>
{
public:
    using IParser = armnnDeserializer::IDeserializer;
    using Params  = InferenceModelInternal::Params;

    static armnn::INetworkPtr Create(const Params& params,
                                     std::vector<armnn::BindingPointInfo>& inputBindings,
                                     std::vector<armnn::BindingPointInfo>& outputBindings)
    {
        auto parser(IParser::Create());
        ARMNN_ASSERT(parser);

        armnn::INetworkPtr network{nullptr, [](armnn::INetwork*){}};

        {
            ARMNN_SCOPED_HEAP_PROFILING("Parsing");

            std::error_code errorCode;
            fs::path pathToFile(params.m_ModelPath);
            if (!fs::exists(pathToFile, errorCode))
            {
                throw armnn::FileNotFoundException(fmt::format("Cannot find the file ({0}) errorCode: {1} {2}",
                                                               params.m_ModelPath,
                                                               errorCode.message(),
                                                               CHECK_LOCATION().AsString()));
            }
            std::ifstream file(params.m_ModelPath, std::ios::binary);

            network = parser->CreateNetworkFromBinary(file);
        }

        unsigned int subgraphId = armnn::numeric_cast<unsigned int>(params.m_SubgraphId);

        for (const std::string& inputLayerName : params.m_InputBindings)
        {
            armnnDeserializer::BindingPointInfo inputBinding =
                parser->GetNetworkInputBindingInfo(subgraphId, inputLayerName);
            inputBindings.push_back(std::make_pair(inputBinding.m_BindingId, inputBinding.m_TensorInfo));
        }

        for (const std::string& outputLayerName : params.m_OutputBindings)
        {
            armnnDeserializer::BindingPointInfo outputBinding =
                parser->GetNetworkOutputBindingInfo(subgraphId, outputLayerName);
            outputBindings.push_back(std::make_pair(outputBinding.m_BindingId, outputBinding.m_TensorInfo));
        }

        return network;
    }
};
#endif

#if defined(ARMNN_TF_LITE_PARSER)
template <>
struct CreateNetworkImpl<armnnTfLiteParser::ITfLiteParser>
{
public:
    using IParser = armnnTfLiteParser::ITfLiteParser;
    using Params  = InferenceModelInternal::Params;

    static armnn::INetworkPtr Create(const Params& params,
                                     std::vector<armnn::BindingPointInfo>& inputBindings,
                                     std::vector<armnn::BindingPointInfo>& outputBindings)
    {
        const std::string& modelPath = params.m_ModelPath;

        // Create a network from a file on disk
        IParser::TfLiteParserOptions options;
        options.m_AllowExpandedDims          = params.m_AllowExpandedDims;
        options.m_StandInLayerForUnsupported = params.m_ParseUnsupported;
        options.m_InferAndValidate           = params.m_InferOutputShape;
        auto parser(IParser::Create(options));

        armnn::INetworkPtr network{nullptr, [](armnn::INetwork*){}};

        {
            ARMNN_SCOPED_HEAP_PROFILING("Parsing");
            network = parser->CreateNetworkFromBinaryFile(modelPath.c_str());
        }

        for (const std::string& inputLayerName : params.m_InputBindings)
        {
            armnn::BindingPointInfo inputBinding =
                parser->GetNetworkInputBindingInfo(params.m_SubgraphId, inputLayerName);
            inputBindings.push_back(inputBinding);
        }

        for (const std::string& outputLayerName : params.m_OutputBindings)
        {
            armnn::BindingPointInfo outputBinding =
                parser->GetNetworkOutputBindingInfo(params.m_SubgraphId, outputLayerName);
            outputBindings.push_back(outputBinding);
        }

        return network;
    }
};
#endif

#if defined(ARMNN_ONNX_PARSER)
template <>
struct CreateNetworkImpl<armnnOnnxParser::IOnnxParser>
{
public:
    using IParser          = armnnOnnxParser::IOnnxParser;
    using Params           = InferenceModelInternal::Params;
    using BindingPointInfo = InferenceModelInternal::BindingPointInfo;

    static armnn::INetworkPtr Create(const Params& params,
                                     std::vector<BindingPointInfo>& inputBindings,
                                     std::vector<BindingPointInfo>& outputBindings)
    {
        const std::string& modelPath = params.m_ModelPath;

        // Create a network from a file on disk
        auto parser(IParser::Create());

        armnn::INetworkPtr network{nullptr, [](armnn::INetwork*){}};

        std::map<std::string, armnn::TensorShape> inputShapes;
        if (!params.m_InputShapes.empty())
        {
            const size_t numInputShapes   = params.m_InputShapes.size();
            const size_t numInputBindings = params.m_InputBindings.size();
            if (numInputShapes < numInputBindings)
            {
                throw armnn::Exception(fmt::format(
                    "Not every input has its tensor shape specified: expected={0}, got={1}",
                    numInputBindings, numInputShapes));
            }

            for (size_t i = 0; i < numInputShapes; i++)
            {
                inputShapes[params.m_InputBindings[i]] = params.m_InputShapes[i];
            }

            {
                ARMNN_SCOPED_HEAP_PROFILING("Parsing");
                network = (params.m_IsModelBinary ?
                    parser->CreateNetworkFromBinaryFile(modelPath.c_str(), inputShapes) :
                    parser->CreateNetworkFromTextFile(modelPath.c_str(), inputShapes));
            }
        }
        else
        {
            ARMNN_SCOPED_HEAP_PROFILING("Parsing");
            network = (params.m_IsModelBinary ?
                parser->CreateNetworkFromBinaryFile(modelPath.c_str()) :
                parser->CreateNetworkFromTextFile(modelPath.c_str()));
        }

        for (const std::string& inputLayerName : params.m_InputBindings)
        {
            BindingPointInfo inputBinding = parser->GetNetworkInputBindingInfo(inputLayerName);
            inputBindings.push_back(inputBinding);
        }

        for (const std::string& outputLayerName : params.m_OutputBindings)
        {
            BindingPointInfo outputBinding = parser->GetNetworkOutputBindingInfo(outputLayerName);
            outputBindings.push_back(outputBinding);
        }

        return network;
    }
};
#endif
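
// Example (sketch): how the CreateNetworkImpl specialisations above are typically
// driven. The parser is chosen at compile time; Create() fills the binding vectors
// alongside the parsed network. The parser choice here is illustrative.
//
//     #if defined(ARMNN_TF_LITE_PARSER)
//     std::vector<armnn::BindingPointInfo> inputBindings;
//     std::vector<armnn::BindingPointInfo> outputBindings;
//     armnn::INetworkPtr network =
//         CreateNetworkImpl<armnnTfLiteParser::ITfLiteParser>::Create(params, inputBindings, outputBindings);
//     #endif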

template <typename IParser, typename TDataType>
class InferenceModel
{
public:
    using DataType           = TDataType;
    using Params             = InferenceModelInternal::Params;
    using QuantizationParams = InferenceModelInternal::QuantizationParams;

    struct CommandLineOptions
    {
        std::string m_ModelDir;
        std::vector<std::string> m_ComputeDevices;
        std::string m_DynamicBackendsPath;
        bool m_VisualizePostOptimizationModel;
        bool m_EnableFp16TurboMode;
        bool m_EnableBf16TurboMode;
        std::string m_Labels;

        std::vector<armnn::BackendId> GetComputeDevicesAsBackendIds()
        {
            std::vector<armnn::BackendId> backendIds;
            std::copy(m_ComputeDevices.begin(), m_ComputeDevices.end(), std::back_inserter(backendIds));
            return backendIds;
        }
    };

    static void AddCommandLineOptions(cxxopts::Options& options,
                                      CommandLineOptions& cLineOptions,
                                      std::vector<std::string>& required)
    {
        const std::vector<std::string> defaultComputes = { "CpuAcc", "CpuRef" };

        const std::string backendsMessage = "Which device to run layers on by default. Possible choices: "
                                          + armnn::BackendRegistryInstance().GetBackendIdsAsString();

        options
            .allow_unrecognised_options()
            .add_options()
            ("m,model-dir", "Path to directory containing model files (.prototxt/.tflite)",
             cxxopts::value<std::string>(cLineOptions.m_ModelDir))
            ("c,compute", backendsMessage.c_str(),
             cxxopts::value<std::vector<std::string>>(cLineOptions.m_ComputeDevices)->default_value("CpuRef"))
            ("b,dynamic-backends-path",
             "Path where to load any available dynamic backend from. "
             "If left empty (the default), dynamic backends will not be used.",
             cxxopts::value(cLineOptions.m_DynamicBackendsPath))
            ("l,labels",
             "Text file containing one image filename - correct label pair per line, "
             "used to test the accuracy of the network.",
             cxxopts::value<std::string>(cLineOptions.m_Labels))
            ("v,visualize-optimized-model",
             "Produce a dot file useful for visualizing the graph post optimization. "
             "The file will have the same name as the model, with the .dot extension.",
             cxxopts::value<bool>(cLineOptions.m_VisualizePostOptimizationModel)->default_value("false"))
            ("fp16-turbo-mode",
             "If this option is enabled, FP32 layers, weights and biases will be converted "
             "to FP16 where the backend supports it.",
             cxxopts::value<bool>(cLineOptions.m_EnableFp16TurboMode)->default_value("false"))
            ("bf16-turbo-mode",
             "If this option is enabled, FP32 layers, weights and biases will be converted "
             "to BF16 where the backend supports it.",
             cxxopts::value<bool>(cLineOptions.m_EnableBf16TurboMode)->default_value("false"));

        required.emplace_back("model-dir");
    }
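
    // Example (sketch): registering these options on a cxxopts::Options instance
    // before parsing argv. The program name and description are placeholders, and
    // "Parser" stands for whichever parser type the caller instantiates.
    //
    //     cxxopts::Options options("InferenceTest", "Run a model and time the inference");
    //     InferenceModel<Parser, float>::CommandLineOptions cLineOptions;
    //     std::vector<std::string> requiredOptions;
    //     InferenceModel<Parser, float>::AddCommandLineOptions(options, cLineOptions, requiredOptions);
    //     auto result = options.parse(argc, argv);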

    InferenceModel(const Params& params,
                   bool enableProfiling,
                   const std::string& dynamicBackendsPath,
                   const std::shared_ptr<armnn::IRuntime>& runtime = nullptr)
        : m_EnableProfiling(enableProfiling),
          m_ProfilingDetailsMethod(armnn::ProfilingDetailsMethod::Undefined),
          m_DynamicBackendsPath(dynamicBackendsPath),
          m_ImportInputsIfAligned(params.m_ImportInputsIfAligned)
    {
        if (runtime)
        {
            m_Runtime = runtime;
        }
        else
        {
            armnn::IRuntime::CreationOptions options;
            options.m_EnableGpuProfiling  = m_EnableProfiling;
            options.m_DynamicBackendsPath = m_DynamicBackendsPath;
            m_Runtime = armnn::IRuntime::Create(options);
        }

        // Configure the profiler if profiling details were requested
        if (params.m_OutputDetailsOnlyToStdOut)
        {
            m_ProfilingDetailsMethod = armnn::ProfilingDetailsMethod::DetailsOnly;
        }
        else if (params.m_OutputDetailsToStdOut)
        {
            m_ProfilingDetailsMethod = armnn::ProfilingDetailsMethod::DetailsWithEvents;
        }

        std::string invalidBackends;
        if (!CheckRequestedBackendsAreValid(params.m_ComputeDevices, armnn::Optional<std::string&>(invalidBackends)))
        {
            throw armnn::Exception("Some backend IDs are invalid: " + invalidBackends);
        }

        armnn::IOptimizedNetworkPtr optNet{nullptr, [](armnn::IOptimizedNetwork*){}};
        {
            const auto parsing_start_time = armnn::GetTimeNow();
            armnn::INetworkPtr network = CreateNetworkImpl<IParser>::Create(params, m_InputBindings, m_OutputBindings);

            ARMNN_LOG(info) << "Network parsing time: " << std::setprecision(2)
                            << std::fixed << armnn::GetTimeDuration(parsing_start_time).count() << " ms.";

            ARMNN_SCOPED_HEAP_PROFILING("Optimizing");

            armnn::OptimizerOptions options;
            options.m_ReduceFp32ToFp16 = params.m_EnableFp16TurboMode;
            options.m_ReduceFp32ToBf16 = params.m_EnableBf16TurboMode;
            options.m_Debug            = params.m_PrintIntermediateLayers;
            options.m_shapeInferenceMethod = params.m_InferOutputShape ?
                armnn::ShapeInferenceMethod::InferAndValidate : armnn::ShapeInferenceMethod::ValidateOnly;
            options.m_ProfilingEnabled = m_EnableProfiling;

            armnn::BackendOptions gpuAcc("GpuAcc",
            {
                { "FastMathEnabled", params.m_EnableFastMath },
                { "SaveCachedNetwork", params.m_SaveCachedNetwork },
                { "CachedNetworkFilePath", params.m_CachedNetworkFilePath },
                { "MLGOTuningFilePath", params.m_MLGOTuningFilePath }
            });

            armnn::BackendOptions cpuAcc("CpuAcc",
            {
                { "FastMathEnabled", params.m_EnableFastMath },
                { "NumberOfThreads", params.m_NumberOfThreads }
            });
            options.m_ModelOptions.push_back(gpuAcc);
            options.m_ModelOptions.push_back(cpuAcc);

            const auto optimization_start_time = armnn::GetTimeNow();
            optNet = armnn::Optimize(*network, params.m_ComputeDevices, m_Runtime->GetDeviceSpec(), options);

            ARMNN_LOG(info) << "Optimization time: " << std::setprecision(2)
                            << std::fixed << armnn::GetTimeDuration(optimization_start_time).count() << " ms.";

            if (!optNet)
            {
                throw armnn::Exception("Optimize returned nullptr");
            }
        }

        if (params.m_VisualizePostOptimizationModel)
        {
            fs::path filename = params.m_ModelPath;
            filename.replace_extension("dot");
            std::fstream file(filename.c_str(), std::ios_base::out);
            optNet->SerializeToDot(file);
        }

        armnn::Status ret;
        {
            ARMNN_SCOPED_HEAP_PROFILING("LoadNetwork");

            const auto loading_start_time = armnn::GetTimeNow();
            armnn::INetworkProperties networkProperties(params.m_AsyncEnabled,
                                                        armnn::MemorySource::Undefined,
                                                        armnn::MemorySource::Undefined,
                                                        enableProfiling,
                                                        m_ProfilingDetailsMethod);
            std::string errorMessage;
            ret = m_Runtime->LoadNetwork(m_NetworkIdentifier, std::move(optNet), errorMessage, networkProperties);

            ARMNN_LOG(info) << "Network loading time: " << std::setprecision(2)
                            << std::fixed << armnn::GetTimeDuration(loading_start_time).count() << " ms.";

            if (params.m_AsyncEnabled && params.m_ThreadPoolSize > 0)
            {
                std::vector<std::shared_ptr<armnn::IWorkingMemHandle>> memHandles;
                for (size_t i = 0; i < params.m_ThreadPoolSize; ++i)
                {
                    memHandles.emplace_back(m_Runtime->CreateWorkingMemHandle(m_NetworkIdentifier));
                }

                m_Threadpool = std::make_unique<armnn::Threadpool>(params.m_ThreadPoolSize,
                                                                   m_Runtime.get(),
                                                                   memHandles);
            }
        }

        if (ret == armnn::Status::Failure)
        {
            throw armnn::Exception("IRuntime::LoadNetwork failed");
        }
    }

    void CheckInputIndexIsValid(unsigned int inputIndex) const
    {
        if (m_InputBindings.size() < inputIndex + 1)
        {
            throw armnn::Exception(fmt::format("Input index out of range: {}", inputIndex));
        }
    }

    void CheckOutputIndexIsValid(unsigned int outputIndex) const
    {
        if (m_OutputBindings.size() < outputIndex + 1)
        {
            throw armnn::Exception(fmt::format("Output index out of range: {}", outputIndex));
        }
    }

    unsigned int GetInputSize(unsigned int inputIndex = 0u) const
    {
        CheckInputIndexIsValid(inputIndex);
        return m_InputBindings[inputIndex].second.GetNumElements();
    }

    unsigned int GetOutputSize(unsigned int outputIndex = 0u) const
    {
        CheckOutputIndexIsValid(outputIndex);
        return m_OutputBindings[outputIndex].second.GetNumElements();
    }

    std::chrono::duration<double, std::milli> Run(
        const std::vector<armnnUtils::TContainer>& inputContainers,
        std::vector<armnnUtils::TContainer>& outputContainers)
    {
        for (unsigned int i = 0; i < outputContainers.size(); ++i)
        {
            const unsigned int expectedOutputDataSize = GetOutputSize(i);

            mapbox::util::apply_visitor([expectedOutputDataSize, i](auto&& value)
            {
                const unsigned int actualOutputDataSize = armnn::numeric_cast<unsigned int>(value.size());
                if (actualOutputDataSize < expectedOutputDataSize)
                {
                    unsigned int outputIndex = i;
                    throw armnn::Exception(
                        fmt::format("Not enough data for output #{0}: expected "
                                    "{1} elements, got {2}", outputIndex, expectedOutputDataSize, actualOutputDataSize));
                }
            },
            outputContainers[i]);
        }

        std::shared_ptr<armnn::IProfiler> profiler = m_Runtime->GetProfiler(m_NetworkIdentifier);

        // Start timer to record inference time in EnqueueWorkload (in milliseconds)
        const auto start_time = armnn::GetTimeNow();

        armnn::Status ret;
        if (m_ImportInputsIfAligned)
        {
            std::vector<armnn::ImportedInputId> importedInputIds = m_Runtime->ImportInputs(
                m_NetworkIdentifier, MakeInputTensors(inputContainers), armnn::MemorySource::Malloc);

            std::vector<armnn::ImportedOutputId> importedOutputIds = m_Runtime->ImportOutputs(
                m_NetworkIdentifier, MakeOutputTensors(outputContainers), armnn::MemorySource::Malloc);

            ret = m_Runtime->EnqueueWorkload(m_NetworkIdentifier,
                                             MakeInputTensors(inputContainers),
                                             MakeOutputTensors(outputContainers),
                                             importedInputIds,
                                             importedOutputIds);
        }
        else
        {
            ret = m_Runtime->EnqueueWorkload(m_NetworkIdentifier,
                                             MakeInputTensors(inputContainers),
                                             MakeOutputTensors(outputContainers));
        }
        const auto duration = armnn::GetTimeDuration(start_time);

        // if profiling is enabled print out the results
        if (profiler && profiler->IsProfilingEnabled())
        {
            profiler->Print(std::cout);
        }

        if (ret == armnn::Status::Failure)
        {
            throw armnn::Exception("IRuntime::EnqueueWorkload failed");
        }
        else
        {
            return duration;
        }
    }
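
    // Example (sketch): a single synchronous inference on an already constructed
    // InferenceModel instance named "model" (hypothetical). Each armnnUtils::TContainer
    // holds one tensor and must be sized to the corresponding binding; float data is
    // assumed here.
    //
    //     std::vector<armnnUtils::TContainer> inputs  = { std::vector<float>(model.GetInputSize(0)) };
    //     std::vector<armnnUtils::TContainer> outputs = { std::vector<float>(model.GetOutputSize(0)) };
    //     auto inferenceTime = model.Run(inputs, outputs);
    //     ARMNN_LOG(info) << "Inference took " << inferenceTime.count() << " ms";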

    std::tuple<unsigned int, std::chrono::duration<double, std::milli>> RunAsync(
        armnn::experimental::IWorkingMemHandle& workingMemHandleRef,
        const std::vector<armnnUtils::TContainer>& inputContainers,
        std::vector<armnnUtils::TContainer>& outputContainers,
        unsigned int inferenceID)
    {
        for (unsigned int i = 0; i < outputContainers.size(); ++i)
        {
            const unsigned int expectedOutputDataSize = GetOutputSize(i);

            mapbox::util::apply_visitor([expectedOutputDataSize, i](auto&& value)
            {
                const unsigned int actualOutputDataSize = armnn::numeric_cast<unsigned int>(value.size());
                if (actualOutputDataSize < expectedOutputDataSize)
                {
                    unsigned int outputIndex = i;
                    throw armnn::Exception(
                        fmt::format("Not enough data for output #{0}: expected "
                                    "{1} elements, got {2}", outputIndex, expectedOutputDataSize, actualOutputDataSize));
                }
            },
            outputContainers[i]);
        }

        std::shared_ptr<armnn::IProfiler> profiler = m_Runtime->GetProfiler(m_NetworkIdentifier);

        // Start timer to record inference time in Execute (in milliseconds)
        const auto start_time = armnn::GetTimeNow();

        armnn::Status ret = m_Runtime->Execute(workingMemHandleRef,
                                               MakeInputTensors(inputContainers),
                                               MakeOutputTensors(outputContainers));

        const auto duration = armnn::GetTimeDuration(start_time);

        // if profiling is enabled print out the results
        if (profiler && profiler->IsProfilingEnabled())
        {
            profiler->Print(std::cout);
        }

        if (ret == armnn::Status::Failure)
        {
            throw armnn::Exception(
                fmt::format("IRuntime::Execute asynchronously failed for network #{0} on inference #{1}",
                            m_NetworkIdentifier, inferenceID));
        }
        else
        {
            return std::make_tuple(inferenceID, duration);
        }
    }
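
    // Example (sketch): one asynchronous inference on a dedicated working-memory
    // handle, for a model loaded with m_AsyncEnabled = true. The instance name
    // "model" and the containers are hypothetical, as in the Run() example above.
    //
    //     auto workingMemHandle = model.CreateWorkingMemHandle();
    //     auto [id, duration] = model.RunAsync(*workingMemHandle, inputs, outputs, /*inferenceID=*/0);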

    void RunAsync(const std::vector<armnnUtils::TContainer>& inputContainers,
                  std::vector<armnnUtils::TContainer>& outputContainers,
                  std::shared_ptr<armnn::IAsyncExecutionCallback> cb)
    {
        for (unsigned int i = 0; i < outputContainers.size(); ++i)
        {
            const unsigned int expectedOutputDataSize = GetOutputSize(i);

            mapbox::util::apply_visitor([expectedOutputDataSize, i](auto&& value)
            {
                const unsigned int actualOutputDataSize = armnn::numeric_cast<unsigned int>(value.size());
                if (actualOutputDataSize < expectedOutputDataSize)
                {
                    unsigned int outputIndex = i;
                    throw armnn::Exception(
                        fmt::format("Not enough data for output #{0}: expected "
                                    "{1} elements, got {2}", outputIndex, expectedOutputDataSize, actualOutputDataSize));
                }
            },
            outputContainers[i]);
        }

        std::shared_ptr<armnn::IProfiler> profiler = m_Runtime->GetProfiler(m_NetworkIdentifier);

        m_Threadpool->Schedule(m_NetworkIdentifier,
                               MakeInputTensors(inputContainers),
                               MakeOutputTensors(outputContainers),
                               armnn::QosExecPriority::Medium,
                               cb);

        // if profiling is enabled print out the results
        if (profiler && profiler->IsProfilingEnabled())
        {
            profiler->Print(std::cout);
        }
    }

    const armnn::BindingPointInfo& GetInputBindingInfo(unsigned int inputIndex = 0u) const
    {
        CheckInputIndexIsValid(inputIndex);
        return m_InputBindings[inputIndex];
    }

    const std::vector<armnn::BindingPointInfo>& GetInputBindingInfos() const
    {
        return m_InputBindings;
    }

    const armnn::BindingPointInfo& GetOutputBindingInfo(unsigned int outputIndex = 0u) const
    {
        CheckOutputIndexIsValid(outputIndex);
        return m_OutputBindings[outputIndex];
    }

    const std::vector<armnn::BindingPointInfo>& GetOutputBindingInfos() const
    {
        return m_OutputBindings;
    }

    QuantizationParams GetQuantizationParams(unsigned int outputIndex = 0u) const
    {
        CheckOutputIndexIsValid(outputIndex);
        return std::make_pair(m_OutputBindings[outputIndex].second.GetQuantizationScale(),
                              m_OutputBindings[outputIndex].second.GetQuantizationOffset());
    }

    QuantizationParams GetInputQuantizationParams(unsigned int inputIndex = 0u) const
    {
        CheckInputIndexIsValid(inputIndex);
        return std::make_pair(m_InputBindings[inputIndex].second.GetQuantizationScale(),
                              m_InputBindings[inputIndex].second.GetQuantizationOffset());
    }

    std::vector<QuantizationParams> GetAllQuantizationParams() const
    {
        std::vector<QuantizationParams> quantizationParams;
        for (unsigned int i = 0u; i < m_OutputBindings.size(); i++)
        {
            quantizationParams.push_back(GetQuantizationParams(i));
        }
        return quantizationParams;
    }

    std::unique_ptr<armnn::experimental::IWorkingMemHandle> CreateWorkingMemHandle()
    {
        return m_Runtime->CreateWorkingMemHandle(m_NetworkIdentifier);
    }

private:
    armnn::NetworkId m_NetworkIdentifier;
    std::shared_ptr<armnn::IRuntime> m_Runtime;
    std::unique_ptr<armnn::Threadpool> m_Threadpool;

    std::vector<armnn::BindingPointInfo> m_InputBindings;
    std::vector<armnn::BindingPointInfo> m_OutputBindings;
    bool m_EnableProfiling;
    armnn::ProfilingDetailsMethod m_ProfilingDetailsMethod;
    std::string m_DynamicBackendsPath;
    bool m_ImportInputsIfAligned;

    template<typename TContainer>
    armnn::InputTensors MakeInputTensors(const std::vector<TContainer>& inputDataContainers)
    {
        return armnnUtils::MakeInputTensors(m_InputBindings, inputDataContainers);
    }

    template<typename TContainer>
    armnn::OutputTensors MakeOutputTensors(std::vector<TContainer>& outputDataContainers)
    {
        return armnnUtils::MakeOutputTensors(m_OutputBindings, outputDataContainers);
    }
};
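
// Example (sketch): end-to-end use of InferenceModel with the TfLite parser.
// Model path, binding names and data are placeholders, and error handling is
// omitted; the class template works the same way with the other parsers above.
//
//     #if defined(ARMNN_TF_LITE_PARSER)
//     using Model = InferenceModel<armnnTfLiteParser::ITfLiteParser, float>;
//
//     Model::Params params;
//     params.m_ModelPath      = "model.tflite";        // hypothetical
//     params.m_InputBindings  = { "input" };           // hypothetical
//     params.m_OutputBindings = { "output" };          // hypothetical
//     params.m_ComputeDevices = { "CpuAcc", "CpuRef" };
//
//     Model model(params, /*enableProfiling=*/false, /*dynamicBackendsPath=*/"");
//
//     std::vector<armnnUtils::TContainer> inputs  = { std::vector<float>(model.GetInputSize(0)) };
//     std::vector<armnnUtils::TContainer> outputs = { std::vector<float>(model.GetOutputSize(0)) };
//     auto duration = model.Run(inputs, outputs);
//     #endif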