//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#pragma once

#include <armnn/ArmNN.hpp>
#include <armnn/BackendRegistry.hpp>

#if defined(ARMNN_SERIALIZER)
#include "armnnDeserializer/IDeserializer.hpp"
#endif
#if defined(ARMNN_TF_LITE_PARSER)
#include <armnnTfLiteParser/ITfLiteParser.hpp>
#endif
#if defined(ARMNN_ONNX_PARSER)
#include <armnnOnnxParser/IOnnxParser.hpp>
#endif

#include <HeapProfiling.hpp>
#include <TensorIOUtils.hpp>

#include <boost/algorithm/string/join.hpp>
#include <boost/exception/exception.hpp>
#include <boost/exception/diagnostic_information.hpp>
#include <boost/format.hpp>
#include <boost/program_options.hpp>
#include <boost/filesystem.hpp>
#include <boost/lexical_cast.hpp>
#include <boost/variant.hpp>

#include <algorithm>
#include <chrono>
#include <iterator>
#include <fstream>
#include <map>
#include <string>
#include <vector>
#include <type_traits>

namespace
{

inline bool CheckRequestedBackendsAreValid(const std::vector<armnn::BackendId>& backendIds,
                                           armnn::Optional<std::string&> invalidBackendIds = armnn::EmptyOptional())
{
    if (backendIds.empty())
    {
        return false;
    }

    armnn::BackendIdSet validBackendIds = armnn::BackendRegistryInstance().GetBackendIds();

    bool allValid = true;
    for (const auto& backendId : backendIds)
    {
        if (std::find(validBackendIds.begin(), validBackendIds.end(), backendId) == validBackendIds.end())
        {
            allValid = false;
            if (invalidBackendIds)
            {
                if (!invalidBackendIds.value().empty())
                {
                    invalidBackendIds.value() += ", ";
                }
                invalidBackendIds.value() += backendId;
            }
        }
    }
    return allValid;
}
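
// A minimal usage sketch (not part of this header's API): collect the invalid IDs so they can be
// reported, mirroring what the InferenceModel constructor does further down. The backend names
// are illustrative; which ones are valid depends on the backends built into this copy of armnn.
//
//     std::string invalidBackends;
//     std::vector<armnn::BackendId> requested = { "CpuAcc", "GpuAcc" };
//     if (!CheckRequestedBackendsAreValid(requested, armnn::Optional<std::string&>(invalidBackends)))
//     {
//         throw armnn::Exception("Some backend IDs are invalid: " + invalidBackends);
//     }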

} // anonymous namespace

namespace InferenceModelInternal
{
using BindingPointInfo = armnn::BindingPointInfo;

using QuantizationParams = std::pair<float,int32_t>;

struct Params
{
    std::string m_ModelPath;
    std::vector<std::string> m_InputBindings;
    std::vector<armnn::TensorShape> m_InputShapes;
    std::vector<std::string> m_OutputBindings;
    std::vector<armnn::BackendId> m_ComputeDevices;
    std::string m_DynamicBackendsPath;
    size_t m_SubgraphId;
    bool m_IsModelBinary;
    bool m_VisualizePostOptimizationModel;
    bool m_EnableFp16TurboMode;
    bool m_EnableBf16TurboMode;
    bool m_PrintIntermediateLayers;
    bool m_ParseUnsupported;

    Params()
        : m_ComputeDevices{}
        , m_SubgraphId(0)
        , m_IsModelBinary(true)
        , m_VisualizePostOptimizationModel(false)
        , m_EnableFp16TurboMode(false)
        , m_EnableBf16TurboMode(false)
        , m_PrintIntermediateLayers(false)
        , m_ParseUnsupported(false)
    {}
};

} // namespace InferenceModelInternal
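
// A usage sketch for Params, assuming a binary .tflite model whose input/output tensors are
// named "input" and "output" (hypothetical names). Every field not set below keeps the default
// established by the constructor above.
//
//     InferenceModelInternal::Params params;
//     params.m_ModelPath      = "model.tflite";
//     params.m_InputBindings  = { "input" };
//     params.m_OutputBindings = { "output" };
//     params.m_ComputeDevices = { "CpuAcc", "CpuRef" };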

template <typename IParser>
struct CreateNetworkImpl
{
public:
    using Params = InferenceModelInternal::Params;

    static armnn::INetworkPtr Create(const Params& params,
                                     std::vector<armnn::BindingPointInfo>& inputBindings,
                                     std::vector<armnn::BindingPointInfo>& outputBindings)
    {
        const std::string& modelPath = params.m_ModelPath;

        // Create a network from a file on disk
        auto parser(IParser::Create());

        std::map<std::string, armnn::TensorShape> inputShapes;
        if (!params.m_InputShapes.empty())
        {
            const size_t numInputShapes   = params.m_InputShapes.size();
            const size_t numInputBindings = params.m_InputBindings.size();
            if (numInputShapes < numInputBindings)
            {
                throw armnn::Exception(boost::str(boost::format(
                    "Not every input has its tensor shape specified: expected=%1%, got=%2%")
                    % numInputBindings % numInputShapes));
            }

            for (size_t i = 0; i < numInputShapes; i++)
            {
                inputShapes[params.m_InputBindings[i]] = params.m_InputShapes[i];
            }
        }

        std::vector<std::string> requestedOutputs = params.m_OutputBindings;
        armnn::INetworkPtr network{nullptr, [](armnn::INetwork *){}};

        {
            ARMNN_SCOPED_HEAP_PROFILING("Parsing");
            // Handle text and binary input differently by calling the corresponding parser function
            network = (params.m_IsModelBinary ?
                parser->CreateNetworkFromBinaryFile(modelPath.c_str(), inputShapes, requestedOutputs) :
                parser->CreateNetworkFromTextFile(modelPath.c_str(), inputShapes, requestedOutputs));
        }

        for (const std::string& inputLayerName : params.m_InputBindings)
        {
            inputBindings.push_back(parser->GetNetworkInputBindingInfo(inputLayerName));
        }

        for (const std::string& outputLayerName : params.m_OutputBindings)
        {
            outputBindings.push_back(parser->GetNetworkOutputBindingInfo(outputLayerName));
        }

        return network;
    }
};
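
// How the CreateNetworkImpl helpers are typically driven (a sketch; the InferenceModel
// constructor below does exactly this): the chosen parser specialisation fills the two
// binding vectors while producing the parsed INetwork.
//
//     std::vector<armnn::BindingPointInfo> inputBindings;
//     std::vector<armnn::BindingPointInfo> outputBindings;
//     armnn::INetworkPtr network =
//         CreateNetworkImpl<IParser>::Create(params, inputBindings, outputBindings);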

#if defined(ARMNN_SERIALIZER)
template <>
struct CreateNetworkImpl<armnnDeserializer::IDeserializer>
{
public:
    using IParser = armnnDeserializer::IDeserializer;
    using Params = InferenceModelInternal::Params;

    static armnn::INetworkPtr Create(const Params& params,
                                     std::vector<armnn::BindingPointInfo>& inputBindings,
                                     std::vector<armnn::BindingPointInfo>& outputBindings)
    {
        auto parser(IParser::Create());
        BOOST_ASSERT(parser);

        armnn::INetworkPtr network{nullptr, [](armnn::INetwork *){}};

        {
            ARMNN_SCOPED_HEAP_PROFILING("Parsing");

            boost::system::error_code errorCode;
            boost::filesystem::path pathToFile(params.m_ModelPath);
            if (!boost::filesystem::exists(pathToFile, errorCode))
            {
                throw armnn::FileNotFoundException(boost::str(
                    boost::format("Cannot find the file (%1%) errorCode: %2% %3%") %
                    params.m_ModelPath %
                    errorCode %
                    CHECK_LOCATION().AsString()));
            }
            std::ifstream file(params.m_ModelPath, std::ios::binary);

            network = parser->CreateNetworkFromBinary(file);
        }

        unsigned int subgraphId = boost::numeric_cast<unsigned int>(params.m_SubgraphId);

        for (const std::string& inputLayerName : params.m_InputBindings)
        {
            armnnDeserializer::BindingPointInfo inputBinding =
                parser->GetNetworkInputBindingInfo(subgraphId, inputLayerName);
            inputBindings.push_back(std::make_pair(inputBinding.m_BindingId, inputBinding.m_TensorInfo));
        }

        for (const std::string& outputLayerName : params.m_OutputBindings)
        {
            armnnDeserializer::BindingPointInfo outputBinding =
                parser->GetNetworkOutputBindingInfo(subgraphId, outputLayerName);
            outputBindings.push_back(std::make_pair(outputBinding.m_BindingId, outputBinding.m_TensorInfo));
        }

        return network;
    }
};
#endif

#if defined(ARMNN_TF_LITE_PARSER)
template <>
struct CreateNetworkImpl<armnnTfLiteParser::ITfLiteParser>
{
public:
    using IParser = armnnTfLiteParser::ITfLiteParser;
    using Params = InferenceModelInternal::Params;

    static armnn::INetworkPtr Create(const Params& params,
                                     std::vector<armnn::BindingPointInfo>& inputBindings,
                                     std::vector<armnn::BindingPointInfo>& outputBindings)
    {
        const std::string& modelPath = params.m_ModelPath;

        // Create a network from a file on disk
        IParser::TfLiteParserOptions options;
        options.m_StandInLayerForUnsupported = params.m_ParseUnsupported;
        auto parser(IParser::Create(options));

        armnn::INetworkPtr network{nullptr, [](armnn::INetwork *){}};

        {
            ARMNN_SCOPED_HEAP_PROFILING("Parsing");
            network = parser->CreateNetworkFromBinaryFile(modelPath.c_str());
        }

        for (const std::string& inputLayerName : params.m_InputBindings)
        {
            armnn::BindingPointInfo inputBinding =
                parser->GetNetworkInputBindingInfo(params.m_SubgraphId, inputLayerName);
            inputBindings.push_back(inputBinding);
        }

        for (const std::string& outputLayerName : params.m_OutputBindings)
        {
            armnn::BindingPointInfo outputBinding =
                parser->GetNetworkOutputBindingInfo(params.m_SubgraphId, outputLayerName);
            outputBindings.push_back(outputBinding);
        }

        return network;
    }
};
#endif

#if defined(ARMNN_ONNX_PARSER)
template <>
struct CreateNetworkImpl<armnnOnnxParser::IOnnxParser>
{
public:
    using IParser = armnnOnnxParser::IOnnxParser;
    using Params = InferenceModelInternal::Params;
    using BindingPointInfo = InferenceModelInternal::BindingPointInfo;

    static armnn::INetworkPtr Create(const Params& params,
                                     std::vector<BindingPointInfo>& inputBindings,
                                     std::vector<BindingPointInfo>& outputBindings)
    {
        const std::string& modelPath = params.m_ModelPath;

        // Create a network from a file on disk
        auto parser(IParser::Create());

        armnn::INetworkPtr network{nullptr, [](armnn::INetwork *){}};

        {
            ARMNN_SCOPED_HEAP_PROFILING("Parsing");
            network = (params.m_IsModelBinary ?
                parser->CreateNetworkFromBinaryFile(modelPath.c_str()) :
                parser->CreateNetworkFromTextFile(modelPath.c_str()));
        }

        for (const std::string& inputLayerName : params.m_InputBindings)
        {
            BindingPointInfo inputBinding = parser->GetNetworkInputBindingInfo(inputLayerName);
            inputBindings.push_back(inputBinding);
        }

        for (const std::string& outputLayerName : params.m_OutputBindings)
        {
            BindingPointInfo outputBinding = parser->GetNetworkOutputBindingInfo(outputLayerName);
            outputBindings.push_back(outputBinding);
        }

        return network;
    }
};
#endif

template <typename IParser, typename TDataType>
class InferenceModel
{
public:
    using DataType = TDataType;
    using Params = InferenceModelInternal::Params;
    using QuantizationParams = InferenceModelInternal::QuantizationParams;
    using TContainer = boost::variant<std::vector<float>, std::vector<int>, std::vector<unsigned char>>;

    struct CommandLineOptions
    {
        std::string m_ModelDir;
        std::vector<std::string> m_ComputeDevices;
        std::string m_DynamicBackendsPath;
        bool m_VisualizePostOptimizationModel;
        bool m_EnableFp16TurboMode;
        bool m_EnableBf16TurboMode;
        std::string m_Labels;

        std::vector<armnn::BackendId> GetComputeDevicesAsBackendIds()
        {
            std::vector<armnn::BackendId> backendIds;
            std::copy(m_ComputeDevices.begin(), m_ComputeDevices.end(), std::back_inserter(backendIds));
            return backendIds;
        }
    };

    static void AddCommandLineOptions(boost::program_options::options_description& desc, CommandLineOptions& options)
    {
        namespace po = boost::program_options;

        const std::vector<std::string> defaultComputes = { "CpuAcc", "CpuRef" };

        const std::string backendsMessage = "Which device to run layers on by default. Possible choices: "
                                          + armnn::BackendRegistryInstance().GetBackendIdsAsString();

        desc.add_options()
            ("model-dir,m", po::value<std::string>(&options.m_ModelDir)->required(),
                "Path to directory containing model files (.caffemodel/.prototxt/.tflite)")
            ("compute,c", po::value<std::vector<std::string>>(&options.m_ComputeDevices)->
                default_value(defaultComputes, boost::algorithm::join(defaultComputes, ", "))->
                multitoken(), backendsMessage.c_str())
            ("dynamic-backends-path,b", po::value(&options.m_DynamicBackendsPath),
                "Path where to load any available dynamic backend from. "
                "If left empty (the default), dynamic backends will not be used.")
            ("labels,l", po::value<std::string>(&options.m_Labels),
                "Text file containing one image filename - correct label pair per line, "
                "used to test the accuracy of the network.")
            ("visualize-optimized-model,v",
                po::value<bool>(&options.m_VisualizePostOptimizationModel)->default_value(false),
                "Produce a dot file useful for visualizing the graph post optimization. "
                "The file will have the same name as the model with the .dot extension.")
            ("fp16-turbo-mode", po::value<bool>(&options.m_EnableFp16TurboMode)->default_value(false),
                "If this option is enabled, FP32 layers, weights and biases will be converted "
                "to FP16 where the backend supports it.")
            ("bf16-turbo-mode", po::value<bool>(&options.m_EnableBf16TurboMode)->default_value(false),
                "If this option is enabled, FP32 layers, weights and biases will be converted "
                "to BF16 where the backend supports it.");
    }
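
    // A sketch of wiring these options into a boost::program_options parse; argc/argv are assumed
    // to come from the caller's main(). po::notify throws if a required option such as --model-dir
    // is missing.
    //
    //     namespace po = boost::program_options;
    //     po::options_description desc("Options");
    //     CommandLineOptions cliOptions;
    //     AddCommandLineOptions(desc, cliOptions);
    //
    //     po::variables_map vm;
    //     po::store(po::parse_command_line(argc, argv, desc), vm);
    //     po::notify(vm);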

    InferenceModel(const Params& params,
                   bool enableProfiling,
                   const std::string& dynamicBackendsPath,
                   const std::shared_ptr<armnn::IRuntime>& runtime = nullptr)
        : m_EnableProfiling(enableProfiling)
        , m_DynamicBackendsPath(dynamicBackendsPath)
    {
        if (runtime)
        {
            m_Runtime = runtime;
        }
        else
        {
            armnn::IRuntime::CreationOptions options;
            options.m_EnableGpuProfiling = m_EnableProfiling;
            options.m_DynamicBackendsPath = m_DynamicBackendsPath;
            m_Runtime = std::move(armnn::IRuntime::Create(options));
        }

        std::string invalidBackends;
        if (!CheckRequestedBackendsAreValid(params.m_ComputeDevices, armnn::Optional<std::string&>(invalidBackends)))
        {
            throw armnn::Exception("Some backend IDs are invalid: " + invalidBackends);
        }

        armnn::INetworkPtr network = CreateNetworkImpl<IParser>::Create(params, m_InputBindings, m_OutputBindings);

        armnn::IOptimizedNetworkPtr optNet{nullptr, [](armnn::IOptimizedNetwork*){}};
        {
            ARMNN_SCOPED_HEAP_PROFILING("Optimizing");

            armnn::OptimizerOptions options;
            options.m_ReduceFp32ToFp16 = params.m_EnableFp16TurboMode;
            options.m_ReduceFp32ToBf16 = params.m_EnableBf16TurboMode;
            options.m_Debug = params.m_PrintIntermediateLayers;

            optNet = armnn::Optimize(*network, params.m_ComputeDevices, m_Runtime->GetDeviceSpec(), options);
            if (!optNet)
            {
                throw armnn::Exception("Optimize returned nullptr");
            }
        }

        if (params.m_VisualizePostOptimizationModel)
        {
            boost::filesystem::path filename = params.m_ModelPath;
            filename.replace_extension("dot");
            std::fstream file(filename.c_str(), std::ios_base::out);
            optNet->SerializeToDot(file);
        }

        armnn::Status ret;
        {
            ARMNN_SCOPED_HEAP_PROFILING("LoadNetwork");
            ret = m_Runtime->LoadNetwork(m_NetworkIdentifier, std::move(optNet));
        }

        if (ret == armnn::Status::Failure)
        {
            throw armnn::Exception("IRuntime::LoadNetwork failed");
        }
    }
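
    // A construction-and-run sketch, assuming the TfLite parser is built in (ARMNN_TF_LITE_PARSER)
    // and the model has one float input and one float output; names and sizes are illustrative.
    //
    //     using TfLiteModel = InferenceModel<armnnTfLiteParser::ITfLiteParser, float>;
    //     TfLiteModel model(params, /*enableProfiling=*/false, /*dynamicBackendsPath=*/"");
    //
    //     std::vector<TfLiteModel::TContainer> inputs  = { std::vector<float>(model.GetInputSize()) };
    //     std::vector<TfLiteModel::TContainer> outputs = { std::vector<float>(model.GetOutputSize()) };
    //     auto inferenceTimeMs = model.Run(inputs, outputs);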

    void CheckInputIndexIsValid(unsigned int inputIndex) const
    {
        if (m_InputBindings.size() < inputIndex + 1)
        {
            throw armnn::Exception(boost::str(boost::format("Input index out of range: %1%") % inputIndex));
        }
    }

    void CheckOutputIndexIsValid(unsigned int outputIndex) const
    {
        if (m_OutputBindings.size() < outputIndex + 1)
        {
            throw armnn::Exception(boost::str(boost::format("Output index out of range: %1%") % outputIndex));
        }
    }

    unsigned int GetInputSize(unsigned int inputIndex = 0u) const
    {
        CheckInputIndexIsValid(inputIndex);
        return m_InputBindings[inputIndex].second.GetNumElements();
    }

    unsigned int GetOutputSize(unsigned int outputIndex = 0u) const
    {
        CheckOutputIndexIsValid(outputIndex);
        return m_OutputBindings[outputIndex].second.GetNumElements();
    }

    std::chrono::duration<double, std::milli> Run(
        const std::vector<TContainer>& inputContainers,
        std::vector<TContainer>& outputContainers)
    {
        for (unsigned int i = 0; i < outputContainers.size(); ++i)
        {
            const unsigned int expectedOutputDataSize = GetOutputSize(i);

            boost::apply_visitor([expectedOutputDataSize, i](auto&& value)
            {
                const unsigned int actualOutputDataSize = boost::numeric_cast<unsigned int>(value.size());
                if (actualOutputDataSize < expectedOutputDataSize)
                {
                    unsigned int outputIndex = boost::numeric_cast<unsigned int>(i);
                    throw armnn::Exception(
                        boost::str(boost::format("Not enough data for output #%1%: expected "
                        "%2% elements, got %3%") % outputIndex % expectedOutputDataSize % actualOutputDataSize));
                }
            },
            outputContainers[i]);
        }

        std::shared_ptr<armnn::IProfiler> profiler = m_Runtime->GetProfiler(m_NetworkIdentifier);
        if (profiler)
        {
            profiler->EnableProfiling(m_EnableProfiling);
        }

        // Start timer to record inference time in EnqueueWorkload (in milliseconds)
        const auto start_time = GetCurrentTime();

        armnn::Status ret = m_Runtime->EnqueueWorkload(m_NetworkIdentifier,
                                                       MakeInputTensors(inputContainers),
                                                       MakeOutputTensors(outputContainers));

        const auto end_time = GetCurrentTime();

        // if profiling is enabled print out the results
        if (profiler && profiler->IsProfilingEnabled())
        {
            profiler->Print(std::cout);
        }

        if (ret == armnn::Status::Failure)
        {
            throw armnn::Exception("IRuntime::EnqueueWorkload failed");
        }
        else
        {
            return std::chrono::duration<double, std::milli>(end_time - start_time);
        }
    }
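
    // The duration Run() returns is already scaled to milliseconds; a sketch of reporting it:
    //
    //     auto duration = model.Run(inputs, outputs);
    //     std::cout << "Inference took " << duration.count() << " ms" << std::endl;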

    const armnn::BindingPointInfo& GetInputBindingInfo(unsigned int inputIndex = 0u) const
    {
        CheckInputIndexIsValid(inputIndex);
        return m_InputBindings[inputIndex];
    }

    const std::vector<armnn::BindingPointInfo>& GetInputBindingInfos() const
    {
        return m_InputBindings;
    }

    const armnn::BindingPointInfo& GetOutputBindingInfo(unsigned int outputIndex = 0u) const
    {
        CheckOutputIndexIsValid(outputIndex);
        return m_OutputBindings[outputIndex];
    }

    const std::vector<armnn::BindingPointInfo>& GetOutputBindingInfos() const
    {
        return m_OutputBindings;
    }

    QuantizationParams GetQuantizationParams(unsigned int outputIndex = 0u) const
    {
        CheckOutputIndexIsValid(outputIndex);
        return std::make_pair(m_OutputBindings[outputIndex].second.GetQuantizationScale(),
                              m_OutputBindings[outputIndex].second.GetQuantizationOffset());
    }
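
    // A sketch of using the returned (scale, offset) pair to turn a raw quantized output element
    // back into a real value; 'quantizedValue' is a hypothetical uint8_t element read from the
    // output container.
    //
    //     QuantizationParams qParams = model.GetQuantizationParams();
    //     float real = qParams.first * (static_cast<int32_t>(quantizedValue) - qParams.second);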

    QuantizationParams GetInputQuantizationParams(unsigned int inputIndex = 0u) const
    {
        CheckInputIndexIsValid(inputIndex);
        return std::make_pair(m_InputBindings[inputIndex].second.GetQuantizationScale(),
                              m_InputBindings[inputIndex].second.GetQuantizationOffset());
    }

    std::vector<QuantizationParams> GetAllQuantizationParams() const
    {
        std::vector<QuantizationParams> quantizationParams;
        for (unsigned int i = 0u; i < m_OutputBindings.size(); i++)
        {
            quantizationParams.push_back(GetQuantizationParams(i));
        }
        return quantizationParams;
    }

private:
    armnn::NetworkId m_NetworkIdentifier;
    std::shared_ptr<armnn::IRuntime> m_Runtime;

    std::vector<armnn::BindingPointInfo> m_InputBindings;
    std::vector<armnn::BindingPointInfo> m_OutputBindings;
    bool m_EnableProfiling;
    std::string m_DynamicBackendsPath;

    template<typename TContainer>
    armnn::InputTensors MakeInputTensors(const std::vector<TContainer>& inputDataContainers)
    {
        return armnnUtils::MakeInputTensors(m_InputBindings, inputDataContainers);
    }

    template<typename TContainer>
    armnn::OutputTensors MakeOutputTensors(std::vector<TContainer>& outputDataContainers)
    {
        return armnnUtils::MakeOutputTensors(m_OutputBindings, outputDataContainers);
    }

    std::chrono::high_resolution_clock::time_point GetCurrentTime()
    {
        return std::chrono::high_resolution_clock::now();
    }

    std::chrono::duration<double, std::milli> GetTimeDuration(
        std::chrono::high_resolution_clock::time_point& start_time,
        std::chrono::high_resolution_clock::time_point& end_time)
    {
        return std::chrono::duration<double, std::milli>(end_time - start_time);
    }

};