//
// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#pragma once

#include <armnn/ArmNN.hpp>
#include <armnn/Utils.hpp>
#include <armnn/Threadpool.hpp>
#include <armnn/Logging.hpp>
#include <armnn/utility/Timer.hpp>
#include <armnn/BackendRegistry.hpp>
#include <armnn/utility/Assert.hpp>
#include <armnn/utility/NumericCast.hpp>
#include <common/include/ProfilingGuid.hpp>

#if defined(ARMNN_SERIALIZER)
#include "armnnDeserializer/IDeserializer.hpp"
#endif
#if defined(ARMNN_TF_LITE_PARSER)
#include <armnnTfLiteParser/ITfLiteParser.hpp>
#endif
#if defined(ARMNN_ONNX_PARSER)
#include <armnnOnnxParser/IOnnxParser.hpp>
#endif

#include <armnnUtils/Filesystem.hpp>
#include <HeapProfiling.hpp>
#include <TensorIOUtils.hpp>

#include "armnn/utility/StringUtils.hpp"
#include <cxxopts/cxxopts.hpp>
#include "CxxoptsUtils.hpp"
#include <fmt/format.h>
#include <mapbox/variant.hpp>

#include <algorithm>
#include <iterator>
#include <fstream>
#include <map>
#include <string>
#include <vector>
#include <type_traits>

namespace
{

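// Checks that every requested backend ID is registered with the BackendRegistry.
// When one or more IDs are unknown, returns false and (if provided) appends the
// offending IDs to invalidBackendIds as a comma-separated list.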
inline bool CheckRequestedBackendsAreValid(const std::vector<armnn::BackendId>& backendIds,
                                           armnn::Optional<std::string&> invalidBackendIds = armnn::EmptyOptional())
{
    if (backendIds.empty())
    {
        return false;
    }

    armnn::BackendIdSet validBackendIds = armnn::BackendRegistryInstance().GetBackendIds();

    bool allValid = true;
    for (const auto& backendId : backendIds)
    {
        if (std::find(validBackendIds.begin(), validBackendIds.end(), backendId) == validBackendIds.end())
        {
            allValid = false;
            if (invalidBackendIds)
            {
                if (!invalidBackendIds.value().empty())
                {
                    invalidBackendIds.value() += ", ";
                }
                invalidBackendIds.value() += backendId;
            }
        }
    }
    return allValid;
}

} // anonymous namespace

namespace InferenceModelInternal
{
using BindingPointInfo = armnn::BindingPointInfo;

using QuantizationParams = std::pair<float, int32_t>;

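// Bundles every option required to parse, optimize and load a model: the model
// path, input/output binding names and shapes, the requested backends, and the
// optimizer/runtime switches exposed by the test tools.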
struct Params
{
    std::string                     m_ModelPath;
    std::vector<std::string>        m_InputBindings;
    std::vector<armnn::TensorShape> m_InputShapes;
    std::vector<std::string>        m_OutputBindings;
    std::vector<armnn::BackendId>   m_ComputeDevices;
    std::string                     m_DynamicBackendsPath;
    size_t                          m_SubgraphId;
    bool                            m_IsModelBinary;
    bool                            m_VisualizePostOptimizationModel;
    bool                            m_EnableFp16TurboMode;
    bool                            m_EnableBf16TurboMode;
    bool                            m_PrintIntermediateLayers;
    bool                            m_ParseUnsupported;
    bool                            m_InferOutputShape;
    bool                            m_EnableFastMath;
    bool                            m_SaveCachedNetwork;
    bool                            m_OutputDetailsToStdOut;
    bool                            m_OutputDetailsOnlyToStdOut;
    std::string                     m_CachedNetworkFilePath;
    unsigned int                    m_NumberOfThreads;
    std::string                     m_MLGOTuningFilePath;
    bool                            m_AsyncEnabled;
    size_t                          m_ThreadPoolSize;

    Params()
        : m_ComputeDevices{}
        , m_SubgraphId(0)
        , m_IsModelBinary(true)
        , m_VisualizePostOptimizationModel(false)
        , m_EnableFp16TurboMode(false)
        , m_EnableBf16TurboMode(false)
        , m_PrintIntermediateLayers(false)
        , m_ParseUnsupported(false)
        , m_InferOutputShape(false)
        , m_EnableFastMath(false)
        , m_SaveCachedNetwork(false)
        , m_OutputDetailsToStdOut(false)
        , m_OutputDetailsOnlyToStdOut(false)
        , m_CachedNetworkFilePath("")
        , m_NumberOfThreads(0)
        , m_MLGOTuningFilePath("")
        , m_AsyncEnabled(false)
        , m_ThreadPoolSize(0)
    {}
};

} // namespace InferenceModelInternal

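// Generic network factory: parses a model file with the given IParser and
// collects the input/output binding information named in Params.
// Parser-specific behaviour is provided by the specializations below.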
template <typename IParser>
struct CreateNetworkImpl
{
public:
    using Params = InferenceModelInternal::Params;

    static armnn::INetworkPtr Create(const Params& params,
                                     std::vector<armnn::BindingPointInfo>& inputBindings,
                                     std::vector<armnn::BindingPointInfo>& outputBindings)
    {
        const std::string& modelPath = params.m_ModelPath;

        // Create a network from a file on disk
        auto parser(IParser::Create());

        std::map<std::string, armnn::TensorShape> inputShapes;
        if (!params.m_InputShapes.empty())
        {
            const size_t numInputShapes   = params.m_InputShapes.size();
            const size_t numInputBindings = params.m_InputBindings.size();
            if (numInputShapes < numInputBindings)
            {
                throw armnn::Exception(fmt::format(
                    "Not every input has its tensor shape specified: expected={0}, got={1}",
                    numInputBindings, numInputShapes));
            }

            for (size_t i = 0; i < numInputShapes; i++)
            {
                inputShapes[params.m_InputBindings[i]] = params.m_InputShapes[i];
            }
        }

        std::vector<std::string> requestedOutputs = params.m_OutputBindings;
        armnn::INetworkPtr network{nullptr, [](armnn::INetwork*){}};

        {
            ARMNN_SCOPED_HEAP_PROFILING("Parsing");
            // Handle text and binary input differently by calling the corresponding parser function
            network = (params.m_IsModelBinary ?
                parser->CreateNetworkFromBinaryFile(modelPath.c_str(), inputShapes, requestedOutputs) :
                parser->CreateNetworkFromTextFile(modelPath.c_str(), inputShapes, requestedOutputs));
        }

        for (const std::string& inputLayerName : params.m_InputBindings)
        {
            inputBindings.push_back(parser->GetNetworkInputBindingInfo(inputLayerName));
        }

        for (const std::string& outputLayerName : params.m_OutputBindings)
        {
            outputBindings.push_back(parser->GetNetworkOutputBindingInfo(outputLayerName));
        }

        return network;
    }
};

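// Specialization for models serialized with the Arm NN serializer (.armnn files):
// the file is always deserialized as binary and bindings are looked up per subgraph.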
#if defined(ARMNN_SERIALIZER)
template <>
struct CreateNetworkImpl<armnnDeserializer::IDeserializer>
{
public:
    using IParser = armnnDeserializer::IDeserializer;
    using Params  = InferenceModelInternal::Params;

    static armnn::INetworkPtr Create(const Params& params,
                                     std::vector<armnn::BindingPointInfo>& inputBindings,
                                     std::vector<armnn::BindingPointInfo>& outputBindings)
    {
        auto parser(IParser::Create());
        ARMNN_ASSERT(parser);

        armnn::INetworkPtr network{nullptr, [](armnn::INetwork*){}};

        {
            ARMNN_SCOPED_HEAP_PROFILING("Parsing");

            std::error_code errorCode;
            fs::path pathToFile(params.m_ModelPath);
            if (!fs::exists(pathToFile, errorCode))
            {
                throw armnn::FileNotFoundException(fmt::format("Cannot find the file ({0}) errorCode: {1} {2}",
                                                               params.m_ModelPath,
                                                               errorCode.message(),
                                                               CHECK_LOCATION().AsString()));
            }
            std::ifstream file(params.m_ModelPath, std::ios::binary);

            network = parser->CreateNetworkFromBinary(file);
        }

        unsigned int subgraphId = armnn::numeric_cast<unsigned int>(params.m_SubgraphId);

        for (const std::string& inputLayerName : params.m_InputBindings)
        {
            armnnDeserializer::BindingPointInfo inputBinding =
                parser->GetNetworkInputBindingInfo(subgraphId, inputLayerName);
            inputBindings.push_back(std::make_pair(inputBinding.m_BindingId, inputBinding.m_TensorInfo));
        }

        for (const std::string& outputLayerName : params.m_OutputBindings)
        {
            armnnDeserializer::BindingPointInfo outputBinding =
                parser->GetNetworkOutputBindingInfo(subgraphId, outputLayerName);
            outputBindings.push_back(std::make_pair(outputBinding.m_BindingId, outputBinding.m_TensorInfo));
        }

        return network;
    }
};
#endif

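// Specialization for TensorFlow Lite models: forwards the stand-in-layer and
// shape-inference options to the parser and always reads the model as binary.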
#if defined(ARMNN_TF_LITE_PARSER)
template <>
struct CreateNetworkImpl<armnnTfLiteParser::ITfLiteParser>
{
public:
    using IParser = armnnTfLiteParser::ITfLiteParser;
    using Params  = InferenceModelInternal::Params;

    static armnn::INetworkPtr Create(const Params& params,
                                     std::vector<armnn::BindingPointInfo>& inputBindings,
                                     std::vector<armnn::BindingPointInfo>& outputBindings)
    {
        const std::string& modelPath = params.m_ModelPath;

        // Create a network from a file on disk
        IParser::TfLiteParserOptions options;
        options.m_StandInLayerForUnsupported = params.m_ParseUnsupported;
        options.m_InferAndValidate           = params.m_InferOutputShape;
        auto parser(IParser::Create(options));

        armnn::INetworkPtr network{nullptr, [](armnn::INetwork*){}};

        {
            ARMNN_SCOPED_HEAP_PROFILING("Parsing");
            network = parser->CreateNetworkFromBinaryFile(modelPath.c_str());
        }

        for (const std::string& inputLayerName : params.m_InputBindings)
        {
            armnn::BindingPointInfo inputBinding =
                parser->GetNetworkInputBindingInfo(params.m_SubgraphId, inputLayerName);
            inputBindings.push_back(inputBinding);
        }

        for (const std::string& outputLayerName : params.m_OutputBindings)
        {
            armnn::BindingPointInfo outputBinding =
                parser->GetNetworkOutputBindingInfo(params.m_SubgraphId, outputLayerName);
            outputBindings.push_back(outputBinding);
        }

        return network;
    }
};
#endif

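// Specialization for ONNX models: if input shapes are supplied they are passed
// to the parser as overrides, and every input binding must then have a shape.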
#if defined(ARMNN_ONNX_PARSER)
template <>
struct CreateNetworkImpl<armnnOnnxParser::IOnnxParser>
{
public:
    using IParser          = armnnOnnxParser::IOnnxParser;
    using Params           = InferenceModelInternal::Params;
    using BindingPointInfo = InferenceModelInternal::BindingPointInfo;

    static armnn::INetworkPtr Create(const Params& params,
                                     std::vector<BindingPointInfo>& inputBindings,
                                     std::vector<BindingPointInfo>& outputBindings)
    {
        const std::string& modelPath = params.m_ModelPath;

        // Create a network from a file on disk
        auto parser(IParser::Create());

        armnn::INetworkPtr network{nullptr, [](armnn::INetwork*){}};

        std::map<std::string, armnn::TensorShape> inputShapes;
        if (!params.m_InputShapes.empty())
        {
            const size_t numInputShapes   = params.m_InputShapes.size();
            const size_t numInputBindings = params.m_InputBindings.size();
            if (numInputShapes < numInputBindings)
            {
                throw armnn::Exception(fmt::format(
                    "Not every input has its tensor shape specified: expected={0}, got={1}",
                    numInputBindings, numInputShapes));
            }

            for (size_t i = 0; i < numInputShapes; i++)
            {
                inputShapes[params.m_InputBindings[i]] = params.m_InputShapes[i];
            }

            {
                ARMNN_SCOPED_HEAP_PROFILING("Parsing");
                network = (params.m_IsModelBinary ?
                    parser->CreateNetworkFromBinaryFile(modelPath.c_str(), inputShapes) :
                    parser->CreateNetworkFromTextFile(modelPath.c_str(), inputShapes));
            }
        }
        else
        {
            ARMNN_SCOPED_HEAP_PROFILING("Parsing");
            network = (params.m_IsModelBinary ?
                parser->CreateNetworkFromBinaryFile(modelPath.c_str()) :
                parser->CreateNetworkFromTextFile(modelPath.c_str()));
        }

        for (const std::string& inputLayerName : params.m_InputBindings)
        {
            BindingPointInfo inputBinding = parser->GetNetworkInputBindingInfo(inputLayerName);
            inputBindings.push_back(inputBinding);
        }

        for (const std::string& outputLayerName : params.m_OutputBindings)
        {
            BindingPointInfo outputBinding = parser->GetNetworkOutputBindingInfo(outputLayerName);
            outputBindings.push_back(outputBinding);
        }

        return network;
    }
};
#endif

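// Wraps an Arm NN runtime together with a single parsed, optimized and loaded
// network: the constructor handles parsing, optimization and LoadNetwork, and the
// Run/RunAsync methods execute inferences against the stored binding information.
//
// Typical usage, as a minimal sketch (the parser type, model path and binding
// names below are illustrative assumptions, not part of this header):
//
//     using Model = InferenceModel<armnnTfLiteParser::ITfLiteParser, float>;
//     Model::Params params;
//     params.m_ModelPath      = "model.tflite";      // hypothetical path
//     params.m_InputBindings  = { "input" };         // hypothetical input layer name
//     params.m_OutputBindings = { "output" };        // hypothetical output layer name
//     params.m_ComputeDevices = { "CpuAcc", "CpuRef" };
//     Model model(params, /*enableProfiling=*/false, /*dynamicBackendsPath=*/"");
//
//     std::vector<armnn::TContainer> inputs  = { std::vector<float>(model.GetInputSize()) };
//     std::vector<armnn::TContainer> outputs = { std::vector<float>(model.GetOutputSize()) };
//     auto inferenceTimeMs = model.Run(inputs, outputs);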
template <typename IParser, typename TDataType>
class InferenceModel
{
public:
    using DataType           = TDataType;
    using Params             = InferenceModelInternal::Params;
    using QuantizationParams = InferenceModelInternal::QuantizationParams;

    struct CommandLineOptions
    {
        std::string m_ModelDir;
        std::vector<std::string> m_ComputeDevices;
        std::string m_DynamicBackendsPath;
        bool m_VisualizePostOptimizationModel;
        bool m_EnableFp16TurboMode;
        bool m_EnableBf16TurboMode;
        std::string m_Labels;

        std::vector<armnn::BackendId> GetComputeDevicesAsBackendIds()
        {
            std::vector<armnn::BackendId> backendIds;
            std::copy(m_ComputeDevices.begin(), m_ComputeDevices.end(), std::back_inserter(backendIds));
            return backendIds;
        }
    };

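    // Registers the common command-line options on a cxxopts::Options object and
    // marks "model-dir" as a required argument for the caller to enforce.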
    static void AddCommandLineOptions(cxxopts::Options& options,
                                      CommandLineOptions& cLineOptions, std::vector<std::string>& required)
    {
        const std::vector<std::string> defaultComputes = { "CpuAcc", "CpuRef" };

        const std::string backendsMessage = "Which device to run layers on by default. Possible choices: "
                                          + armnn::BackendRegistryInstance().GetBackendIdsAsString();

        options
            .allow_unrecognised_options()
            .add_options()
                ("m,model-dir", "Path to directory containing model files (.prototxt/.tflite)",
                 cxxopts::value<std::string>(cLineOptions.m_ModelDir))
                ("c,compute", backendsMessage.c_str(),
                 cxxopts::value<std::vector<std::string>>(cLineOptions.m_ComputeDevices)->default_value("CpuRef"))
                ("b,dynamic-backends-path",
                 "Path where to load any available dynamic backend from. "
                 "If left empty (the default), dynamic backends will not be used.",
                 cxxopts::value(cLineOptions.m_DynamicBackendsPath))
                ("l,labels",
                 "Text file containing one image filename - correct label pair per line, "
                 "used to test the accuracy of the network.", cxxopts::value<std::string>(cLineOptions.m_Labels))
                ("v,visualize-optimized-model",
                 "Produce a dot file useful for visualizing the graph post optimization. "
                 "The file will have the same name as the model with the .dot extension.",
                 cxxopts::value<bool>(cLineOptions.m_VisualizePostOptimizationModel)->default_value("false"))
                ("fp16-turbo-mode",
                 "If this option is enabled, FP32 layers, weights and biases will be converted "
                 "to FP16 where the backend supports it.",
                 cxxopts::value<bool>(cLineOptions.m_EnableFp16TurboMode)->default_value("false"))
                ("bf16-turbo-mode",
                 "If this option is enabled, FP32 layers, weights and biases will be converted "
                 "to BF16 where the backend supports it.",
                 cxxopts::value<bool>(cLineOptions.m_EnableBf16TurboMode)->default_value("false"));

        required.emplace_back("model-dir");
    }

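    // Parses the model described by params, optimizes it for the requested backends,
    // loads it into the runtime and, when async execution with a thread pool is
    // requested, pre-creates the working memory handles for that pool.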
    InferenceModel(const Params& params,
                   bool enableProfiling,
                   const std::string& dynamicBackendsPath,
                   const std::shared_ptr<armnn::IRuntime>& runtime = nullptr)
        : m_EnableProfiling(enableProfiling)
        , m_ProfilingDetailsMethod(armnn::ProfilingDetailsMethod::Undefined)
        , m_DynamicBackendsPath(dynamicBackendsPath)
    {
        if (runtime)
        {
            m_Runtime = runtime;
        }
        else
        {
            armnn::IRuntime::CreationOptions options;
            options.m_EnableGpuProfiling = m_EnableProfiling;
            options.m_DynamicBackendsPath = m_DynamicBackendsPath;
            m_Runtime = armnn::IRuntime::Create(options);
        }

        // Configure the profiler if profiling details are requested
        if (params.m_OutputDetailsOnlyToStdOut)
        {
            m_ProfilingDetailsMethod = armnn::ProfilingDetailsMethod::DetailsOnly;
        }
        else if (params.m_OutputDetailsToStdOut)
        {
            m_ProfilingDetailsMethod = armnn::ProfilingDetailsMethod::DetailsWithEvents;
        }

        std::string invalidBackends;
        if (!CheckRequestedBackendsAreValid(params.m_ComputeDevices, armnn::Optional<std::string&>(invalidBackends)))
        {
            throw armnn::Exception("Some backend IDs are invalid: " + invalidBackends);
        }

        armnn::IOptimizedNetworkPtr optNet{nullptr, [](armnn::IOptimizedNetwork*){}};
        {
            const auto parsing_start_time = armnn::GetTimeNow();
            armnn::INetworkPtr network = CreateNetworkImpl<IParser>::Create(params, m_InputBindings, m_OutputBindings);

            ARMNN_LOG(info) << "Network parsing time: " << std::setprecision(2)
                            << std::fixed << armnn::GetTimeDuration(parsing_start_time).count() << " ms\n";

            ARMNN_SCOPED_HEAP_PROFILING("Optimizing");

            armnn::OptimizerOptions options;
            options.m_ReduceFp32ToFp16 = params.m_EnableFp16TurboMode;
            options.m_ReduceFp32ToBf16 = params.m_EnableBf16TurboMode;
            options.m_Debug = params.m_PrintIntermediateLayers;
            options.m_shapeInferenceMethod = params.m_InferOutputShape ?
                armnn::ShapeInferenceMethod::InferAndValidate : armnn::ShapeInferenceMethod::ValidateOnly;
            options.m_ProfilingEnabled = m_EnableProfiling;

            armnn::BackendOptions gpuAcc("GpuAcc",
            {
                { "FastMathEnabled", params.m_EnableFastMath },
                { "SaveCachedNetwork", params.m_SaveCachedNetwork },
                { "CachedNetworkFilePath", params.m_CachedNetworkFilePath },
                { "MLGOTuningFilePath", params.m_MLGOTuningFilePath }
            });

            armnn::BackendOptions cpuAcc("CpuAcc",
            {
                { "FastMathEnabled", params.m_EnableFastMath },
                { "NumberOfThreads", params.m_NumberOfThreads }
            });
            options.m_ModelOptions.push_back(gpuAcc);
            options.m_ModelOptions.push_back(cpuAcc);

            const auto optimization_start_time = armnn::GetTimeNow();
            optNet = armnn::Optimize(*network, params.m_ComputeDevices, m_Runtime->GetDeviceSpec(), options);

            ARMNN_LOG(info) << "Optimization time: " << std::setprecision(2)
                            << std::fixed << armnn::GetTimeDuration(optimization_start_time).count() << " ms\n";

            if (!optNet)
            {
                throw armnn::Exception("Optimize returned nullptr");
            }
        }

        if (params.m_VisualizePostOptimizationModel)
        {
            fs::path filename = params.m_ModelPath;
            filename.replace_extension("dot");
            std::fstream file(filename.c_str(), std::ios_base::out);
            optNet->SerializeToDot(file);
        }

        armnn::Status ret;
        {
            ARMNN_SCOPED_HEAP_PROFILING("LoadNetwork");

            const auto loading_start_time = armnn::GetTimeNow();
            armnn::INetworkProperties networkProperties(params.m_AsyncEnabled,
                                                        armnn::MemorySource::Undefined,
                                                        armnn::MemorySource::Undefined,
                                                        enableProfiling,
                                                        m_ProfilingDetailsMethod);
            std::string errorMessage;
            ret = m_Runtime->LoadNetwork(m_NetworkIdentifier, std::move(optNet), errorMessage, networkProperties);

            ARMNN_LOG(info) << "Network loading time: " << std::setprecision(2)
                            << std::fixed << armnn::GetTimeDuration(loading_start_time).count() << " ms\n";

            if (params.m_AsyncEnabled && params.m_ThreadPoolSize > 0)
            {
                std::vector<std::shared_ptr<armnn::IWorkingMemHandle>> memHandles;
                for (size_t i = 0; i < params.m_ThreadPoolSize; ++i)
                {
                    memHandles.emplace_back(m_Runtime->CreateWorkingMemHandle(m_NetworkIdentifier));
                }

                m_Threadpool = std::make_unique<armnn::Threadpool>(params.m_ThreadPoolSize,
                                                                   m_Runtime.get(),
                                                                   memHandles);
            }
        }

        if (ret == armnn::Status::Failure)
        {
            throw armnn::Exception("IRuntime::LoadNetwork failed");
        }
    }

    void CheckInputIndexIsValid(unsigned int inputIndex) const
    {
        if (m_InputBindings.size() < inputIndex + 1)
        {
            throw armnn::Exception(fmt::format("Input index out of range: {}", inputIndex));
        }
    }

    void CheckOutputIndexIsValid(unsigned int outputIndex) const
    {
        if (m_OutputBindings.size() < outputIndex + 1)
        {
            throw armnn::Exception(fmt::format("Output index out of range: {}", outputIndex));
        }
    }

    unsigned int GetInputSize(unsigned int inputIndex = 0u) const
    {
        CheckInputIndexIsValid(inputIndex);
        return m_InputBindings[inputIndex].second.GetNumElements();
    }

    unsigned int GetOutputSize(unsigned int outputIndex = 0u) const
    {
        CheckOutputIndexIsValid(outputIndex);
        return m_OutputBindings[outputIndex].second.GetNumElements();
    }

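    // Runs one synchronous inference via EnqueueWorkload after checking that every
    // output container is large enough, and returns the wall-clock execution time
    // in milliseconds. Prints the profiler output when profiling is enabled.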
    std::chrono::duration<double, std::milli> Run(
            const std::vector<armnn::TContainer>& inputContainers,
            std::vector<armnn::TContainer>& outputContainers)
    {
        for (unsigned int i = 0; i < outputContainers.size(); ++i)
        {
            const unsigned int expectedOutputDataSize = GetOutputSize(i);

            mapbox::util::apply_visitor([expectedOutputDataSize, i](auto&& value)
            {
                const unsigned int actualOutputDataSize = armnn::numeric_cast<unsigned int>(value.size());
                if (actualOutputDataSize < expectedOutputDataSize)
                {
                    unsigned int outputIndex = i;
                    throw armnn::Exception(
                        fmt::format("Not enough data for output #{0}: expected "
                                    "{1} elements, got {2}", outputIndex, expectedOutputDataSize, actualOutputDataSize));
                }
            },
            outputContainers[i]);
        }

        std::shared_ptr<armnn::IProfiler> profiler = m_Runtime->GetProfiler(m_NetworkIdentifier);

        // Start timer to record inference time in EnqueueWorkload (in milliseconds)
        const auto start_time = armnn::GetTimeNow();

        armnn::Status ret = m_Runtime->EnqueueWorkload(m_NetworkIdentifier,
                                                       MakeInputTensors(inputContainers),
                                                       MakeOutputTensors(outputContainers));
        const auto duration = armnn::GetTimeDuration(start_time);

        // if profiling is enabled print out the results
        if (profiler && profiler->IsProfilingEnabled())
        {
            profiler->Print(std::cout);
        }

        if (ret == armnn::Status::Failure)
        {
            throw armnn::Exception("IRuntime::EnqueueWorkload failed");
        }
        else
        {
            return duration;
        }
    }

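    // Runs one inference asynchronously on the supplied working memory handle via
    // IRuntime::Execute and returns the inference ID together with its duration.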
    std::tuple<unsigned int, std::chrono::duration<double, std::milli>> RunAsync(
        armnn::experimental::IWorkingMemHandle& workingMemHandleRef,
        const std::vector<armnn::TContainer>& inputContainers,
        std::vector<armnn::TContainer>& outputContainers,
        unsigned int inferenceID)
    {
        for (unsigned int i = 0; i < outputContainers.size(); ++i)
        {
            const unsigned int expectedOutputDataSize = GetOutputSize(i);

            mapbox::util::apply_visitor([expectedOutputDataSize, i](auto&& value)
            {
                const unsigned int actualOutputDataSize = armnn::numeric_cast<unsigned int>(value.size());
                if (actualOutputDataSize < expectedOutputDataSize)
                {
                    unsigned int outputIndex = i;
                    throw armnn::Exception(
                        fmt::format("Not enough data for output #{0}: expected "
                                    "{1} elements, got {2}", outputIndex, expectedOutputDataSize, actualOutputDataSize));
                }
            },
            outputContainers[i]);
        }

        std::shared_ptr<armnn::IProfiler> profiler = m_Runtime->GetProfiler(m_NetworkIdentifier);

        // Start timer to record inference time in Execute (in milliseconds)
        const auto start_time = armnn::GetTimeNow();

        armnn::Status ret = m_Runtime->Execute(workingMemHandleRef,
                                               MakeInputTensors(inputContainers),
                                               MakeOutputTensors(outputContainers));

        const auto duration = armnn::GetTimeDuration(start_time);

        // if profiling is enabled print out the results
        if (profiler && profiler->IsProfilingEnabled())
        {
            profiler->Print(std::cout);
        }

        if (ret == armnn::Status::Failure)
        {
            throw armnn::Exception(
                fmt::format("IRuntime::Execute asynchronously failed for network #{0} on inference #{1}",
                            m_NetworkIdentifier, inferenceID));
        }
        else
        {
            return std::make_tuple(inferenceID, duration);
        }
    }

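    // Schedules one inference on the internal thread pool with medium priority;
    // the result is delivered through the supplied asynchronous execution callback.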
    void RunAsync(const std::vector<armnn::TContainer>& inputContainers,
                  std::vector<armnn::TContainer>& outputContainers,
                  std::shared_ptr<armnn::IAsyncExecutionCallback> cb)
    {
        for (unsigned int i = 0; i < outputContainers.size(); ++i)
        {
            const unsigned int expectedOutputDataSize = GetOutputSize(i);

            mapbox::util::apply_visitor([expectedOutputDataSize, i](auto&& value)
            {
                const unsigned int actualOutputDataSize = armnn::numeric_cast<unsigned int>(value.size());
                if (actualOutputDataSize < expectedOutputDataSize)
                {
                    unsigned int outputIndex = i;
                    throw armnn::Exception(
                        fmt::format("Not enough data for output #{0}: expected "
                                    "{1} elements, got {2}", outputIndex, expectedOutputDataSize, actualOutputDataSize));
                }
            },
            outputContainers[i]);
        }

        std::shared_ptr<armnn::IProfiler> profiler = m_Runtime->GetProfiler(m_NetworkIdentifier);

        m_Threadpool->Schedule(m_NetworkIdentifier,
                               MakeInputTensors(inputContainers),
                               MakeOutputTensors(outputContainers),
                               armnn::QosExecPriority::Medium,
                               cb);

        // if profiling is enabled print out the results
        if (profiler && profiler->IsProfilingEnabled())
        {
            profiler->Print(std::cout);
        }
    }

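    // Accessors for the input/output binding information gathered at parse time.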
    const armnn::BindingPointInfo& GetInputBindingInfo(unsigned int inputIndex = 0u) const
    {
        CheckInputIndexIsValid(inputIndex);
        return m_InputBindings[inputIndex];
    }

    const std::vector<armnn::BindingPointInfo>& GetInputBindingInfos() const
    {
        return m_InputBindings;
    }

    const armnn::BindingPointInfo& GetOutputBindingInfo(unsigned int outputIndex = 0u) const
    {
        CheckOutputIndexIsValid(outputIndex);
        return m_OutputBindings[outputIndex];
    }

    const std::vector<armnn::BindingPointInfo>& GetOutputBindingInfos() const
    {
        return m_OutputBindings;
    }

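    // Quantization parameters are returned as a (scale, offset) pair taken from
    // the corresponding binding's TensorInfo.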
    QuantizationParams GetQuantizationParams(unsigned int outputIndex = 0u) const
    {
        CheckOutputIndexIsValid(outputIndex);
        return std::make_pair(m_OutputBindings[outputIndex].second.GetQuantizationScale(),
                              m_OutputBindings[outputIndex].second.GetQuantizationOffset());
    }

    QuantizationParams GetInputQuantizationParams(unsigned int inputIndex = 0u) const
    {
        CheckInputIndexIsValid(inputIndex);
        return std::make_pair(m_InputBindings[inputIndex].second.GetQuantizationScale(),
                              m_InputBindings[inputIndex].second.GetQuantizationOffset());
    }

    std::vector<QuantizationParams> GetAllQuantizationParams() const
    {
        std::vector<QuantizationParams> quantizationParams;
        for (unsigned int i = 0u; i < m_OutputBindings.size(); i++)
        {
            quantizationParams.push_back(GetQuantizationParams(i));
        }
        return quantizationParams;
    }

    std::unique_ptr<armnn::experimental::IWorkingMemHandle> CreateWorkingMemHandle()
    {
        return m_Runtime->CreateWorkingMemHandle(m_NetworkIdentifier);
    }

private:
    armnn::NetworkId m_NetworkIdentifier;
    std::shared_ptr<armnn::IRuntime> m_Runtime;
    std::unique_ptr<armnn::Threadpool> m_Threadpool;

    std::vector<armnn::BindingPointInfo> m_InputBindings;
    std::vector<armnn::BindingPointInfo> m_OutputBindings;
    bool m_EnableProfiling;
    armnn::ProfilingDetailsMethod m_ProfilingDetailsMethod;
    std::string m_DynamicBackendsPath;

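    // Helpers that pair the cached binding info with the user-supplied containers
    // to build the InputTensors/OutputTensors consumed by the runtime.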
    template<typename TContainer>
    armnn::InputTensors MakeInputTensors(const std::vector<TContainer>& inputDataContainers)
    {
        return armnnUtils::MakeInputTensors(m_InputBindings, inputDataContainers);
    }

    template<typename TContainer>
    armnn::OutputTensors MakeOutputTensors(std::vector<TContainer>& outputDataContainers)
    {
        return armnnUtils::MakeOutputTensors(m_OutputBindings, outputDataContainers);
    }
};