//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//
#pragma once
#include <armnn/ArmNN.hpp>

#if defined(ARMNN_TF_LITE_PARSER)
#include <armnnTfLiteParser/ITfLiteParser.hpp>
#endif

#include <HeapProfiling.hpp>
#if defined(ARMNN_ONNX_PARSER)
#include <armnnOnnxParser/IOnnxParser.hpp>
#endif

#include <backendsCommon/BackendRegistry.hpp>

#include <boost/exception/exception.hpp>
#include <boost/exception/diagnostic_information.hpp>
#include <boost/log/trivial.hpp>
#include <boost/format.hpp>
#include <boost/program_options.hpp>
#include <boost/filesystem.hpp>
#include <boost/lexical_cast.hpp>

#include <algorithm>
#include <fstream>
#include <iostream>
#include <map>
#include <string>
#include <type_traits>
#include <vector>

namespace
{

inline bool CheckRequestedBackendsAreValid(const std::vector<armnn::BackendId>& backendIds,
                                           armnn::Optional<std::string&> invalidBackendIds = armnn::EmptyOptional())
{
    if (backendIds.empty())
    {
        return false;
    }

    armnn::BackendIdSet validBackendIds = armnn::BackendRegistryInstance().GetBackendIds();

    bool allValid = true;
    for (const auto& backendId : backendIds)
    {
        if (std::find(validBackendIds.begin(), validBackendIds.end(), backendId) == validBackendIds.end())
        {
            allValid = false;
            if (invalidBackendIds)
            {
                if (!invalidBackendIds.value().empty())
                {
                    invalidBackendIds.value() += ", ";
                }
                invalidBackendIds.value() += backendId;
            }
        }
    }
    return allValid;
}

} // anonymous namespace
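
// A minimal usage sketch for CheckRequestedBackendsAreValid, mirroring how the
// InferenceModel constructor below reports the offending IDs (the backend choices
// here are illustrative only):
//
//     std::vector<armnn::BackendId> requested = {armnn::Compute::CpuAcc, armnn::Compute::GpuAcc};
//     std::string invalidBackends;
//     if (!CheckRequestedBackendsAreValid(requested, armnn::Optional<std::string&>(invalidBackends)))
//     {
//         throw armnn::Exception("Some backend IDs are invalid: " + invalidBackends);
//     }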

namespace InferenceModelInternal
{
// This should be removed once the armnnCaffeParser, armnnTfParser and armnnTfLiteParser
// definitions of BindingPointInfo are consolidated.
using BindingPointInfo = std::pair<armnn::LayerBindingId, armnn::TensorInfo>;

using QuantizationParams = std::pair<float, int32_t>;

struct Params
{
    std::string m_ModelPath;
    std::string m_InputBinding;
    std::string m_OutputBinding;
    const armnn::TensorShape* m_InputTensorShape;
    std::vector<armnn::BackendId> m_ComputeDevice;
    bool m_EnableProfiling;
    size_t m_SubgraphId;
    bool m_IsModelBinary;
    bool m_VisualizePostOptimizationModel;
    bool m_EnableFp16TurboMode;

    Params()
        : m_InputTensorShape(nullptr)
        , m_ComputeDevice{armnn::Compute::CpuRef}
        , m_EnableProfiling(false)
        , m_SubgraphId(0)
        , m_IsModelBinary(true)
        , m_VisualizePostOptimizationModel(false)
        , m_EnableFp16TurboMode(false)
    {}
};

} // namespace InferenceModelInternal
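
// A minimal sketch of how a caller might fill in Params before constructing an
// InferenceModel (the model path and binding names below are hypothetical):
//
//     InferenceModelInternal::Params params;
//     params.m_ModelPath     = "model.tflite";   // hypothetical path
//     params.m_InputBinding  = "input";          // hypothetical input binding name
//     params.m_OutputBinding = "output";         // hypothetical output binding name
//     params.m_ComputeDevice = {armnn::Compute::CpuAcc, armnn::Compute::CpuRef};
//     params.m_IsModelBinary = true;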

template <typename IParser>
struct CreateNetworkImpl
{
public:
    using Params = InferenceModelInternal::Params;
    using BindingPointInfo = InferenceModelInternal::BindingPointInfo;

    static armnn::INetworkPtr Create(const Params& params,
                                     BindingPointInfo& inputBindings,
                                     BindingPointInfo& outputBindings)
    {
        const std::string& modelPath = params.m_ModelPath;

        // Create a network from a file on disk.
        auto parser(IParser::Create());

        std::map<std::string, armnn::TensorShape> inputShapes;
        if (params.m_InputTensorShape)
        {
            inputShapes[params.m_InputBinding] = *params.m_InputTensorShape;
        }
        std::vector<std::string> requestedOutputs{ params.m_OutputBinding };
        armnn::INetworkPtr network{nullptr, [](armnn::INetwork*){}};

        {
            ARMNN_SCOPED_HEAP_PROFILING("Parsing");
            // Handle text and binary input differently by calling the corresponding parser function.
            network = (params.m_IsModelBinary ?
                parser->CreateNetworkFromBinaryFile(modelPath.c_str(), inputShapes, requestedOutputs) :
                parser->CreateNetworkFromTextFile(modelPath.c_str(), inputShapes, requestedOutputs));
        }

        inputBindings  = parser->GetNetworkInputBindingInfo(params.m_InputBinding);
        outputBindings = parser->GetNetworkOutputBindingInfo(params.m_OutputBinding);
        return network;
    }
};
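
// A minimal sketch of how CreateNetworkImpl is invoked; this mirrors the call made in
// the InferenceModel constructor further down (the parser type is illustrative and the
// Params instance is assumed to be populated as shown above):
//
//     InferenceModelInternal::BindingPointInfo inputBinding;
//     InferenceModelInternal::BindingPointInfo outputBinding;
//     armnn::INetworkPtr network =
//         CreateNetworkImpl<armnnOnnxParser::IOnnxParser>::Create(params, inputBinding, outputBinding);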

#if defined(ARMNN_TF_LITE_PARSER)
template <>
struct CreateNetworkImpl<armnnTfLiteParser::ITfLiteParser>
{
public:
    using IParser = armnnTfLiteParser::ITfLiteParser;
    using Params = InferenceModelInternal::Params;
    using BindingPointInfo = InferenceModelInternal::BindingPointInfo;

    static armnn::INetworkPtr Create(const Params& params,
                                     BindingPointInfo& inputBindings,
                                     BindingPointInfo& outputBindings)
    {
        const std::string& modelPath = params.m_ModelPath;

        // Create a network from a file on disk.
        auto parser(IParser::Create());

        armnn::INetworkPtr network{nullptr, [](armnn::INetwork*){}};

        {
            ARMNN_SCOPED_HEAP_PROFILING("Parsing");
            network = parser->CreateNetworkFromBinaryFile(modelPath.c_str());
        }

        inputBindings  = parser->GetNetworkInputBindingInfo(params.m_SubgraphId, params.m_InputBinding);
        outputBindings = parser->GetNetworkOutputBindingInfo(params.m_SubgraphId, params.m_OutputBinding);
        return network;
    }
};
#endif

#if defined(ARMNN_ONNX_PARSER)
template <>
struct CreateNetworkImpl<armnnOnnxParser::IOnnxParser>
{
public:
    using IParser = armnnOnnxParser::IOnnxParser;
    using Params = InferenceModelInternal::Params;
    using BindingPointInfo = InferenceModelInternal::BindingPointInfo;

    static armnn::INetworkPtr Create(const Params& params,
                                     BindingPointInfo& inputBindings,
                                     BindingPointInfo& outputBindings)
    {
        const std::string& modelPath = params.m_ModelPath;

        // Create a network from a file on disk.
        auto parser(IParser::Create());

        armnn::INetworkPtr network{nullptr, [](armnn::INetwork*){}};

        {
            ARMNN_SCOPED_HEAP_PROFILING("Parsing");
            network = (params.m_IsModelBinary ?
                parser->CreateNetworkFromBinaryFile(modelPath.c_str()) :
                parser->CreateNetworkFromTextFile(modelPath.c_str()));
        }

        inputBindings  = parser->GetNetworkInputBindingInfo(params.m_InputBinding);
        outputBindings = parser->GetNetworkOutputBindingInfo(params.m_OutputBinding);
        return network;
    }
};
#endif

template<typename TContainer>
inline armnn::InputTensors MakeInputTensors(const InferenceModelInternal::BindingPointInfo& input,
                                            const TContainer& inputTensorData)
{
    if (inputTensorData.size() != input.second.GetNumElements())
    {
        try
        {
            throw armnn::Exception(boost::str(boost::format("Input tensor has incorrect size. Expected %1% elements "
                "but got %2%.") % input.second.GetNumElements() % inputTensorData.size()));
        }
        catch (const boost::exception& e)
        {
            // Coverity fix: it should not be possible to get here, but boost::str and boost::format can both
            // throw uncaught exceptions; convert them to armnn exceptions and rethrow.
            throw armnn::Exception(diagnostic_information(e));
        }
    }
    return { { input.first, armnn::ConstTensor(input.second, inputTensorData.data()) } };
}

template<typename TContainer>
inline armnn::OutputTensors MakeOutputTensors(const InferenceModelInternal::BindingPointInfo& output,
                                              TContainer& outputTensorData)
{
    if (outputTensorData.size() != output.second.GetNumElements())
    {
        throw armnn::Exception("Output tensor has incorrect size");
    }
    return { { output.first, armnn::Tensor(output.second, outputTensorData.data()) } };
}
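
// A minimal sketch of how the two helpers above are typically used together; the
// container type and the binding infos come from the calling code and are assumptions here:
//
//     std::vector<float> inputData(inputBinding.second.GetNumElements());
//     std::vector<float> outputData(outputBinding.second.GetNumElements());
//     armnn::InputTensors  inputTensors  = MakeInputTensors(inputBinding, inputData);
//     armnn::OutputTensors outputTensors = MakeOutputTensors(outputBinding, outputData);
//     runtime->EnqueueWorkload(networkId, inputTensors, outputTensors);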

template <typename IParser, typename TDataType>
class InferenceModel
{
public:
    using DataType = TDataType;
    using Params = InferenceModelInternal::Params;

    struct CommandLineOptions
    {
        std::string m_ModelDir;
        std::vector<armnn::BackendId> m_ComputeDevice;
        bool m_VisualizePostOptimizationModel;
        bool m_EnableFp16TurboMode;
    };

    static void AddCommandLineOptions(boost::program_options::options_description& desc, CommandLineOptions& options)
    {
        namespace po = boost::program_options;

        std::vector<armnn::BackendId> defaultBackends = {armnn::Compute::CpuAcc, armnn::Compute::CpuRef};

        const std::string backendsMessage = "Which device to run layers on by default. Possible choices: "
                                            + armnn::BackendRegistryInstance().GetBackendIdsAsString();

        desc.add_options()
            ("model-dir,m", po::value<std::string>(&options.m_ModelDir)->required(),
                "Path to directory containing model files (.caffemodel/.prototxt/.tflite)")
            ("compute,c", po::value<std::vector<armnn::BackendId>>(&options.m_ComputeDevice)
                ->default_value(defaultBackends), backendsMessage.c_str())
            ("visualize-optimized-model,v",
                po::value<bool>(&options.m_VisualizePostOptimizationModel)->default_value(false),
                "Produce a dot file useful for visualizing the graph post optimization. "
                "The file will have the same name as the model with the .dot extension.")
            ("fp16-turbo-mode", po::value<bool>(&options.m_EnableFp16TurboMode)->default_value(false),
                "If this option is enabled, FP32 layers, weights and biases will be converted "
                "to FP16 where the backend supports it.");
    }
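
    // A minimal sketch of how AddCommandLineOptions is typically wired into a
    // boost::program_options parse; argc/argv, the ParserType placeholder and the
    // error handling belong to the calling application and are assumptions here:
    //
    //     namespace po = boost::program_options;
    //     po::options_description desc("Options");
    //     InferenceModel<ParserType, float>::CommandLineOptions cliOptions;
    //     InferenceModel<ParserType, float>::AddCommandLineOptions(desc, cliOptions);
    //     po::variables_map vm;
    //     po::store(po::parse_command_line(argc, argv, desc), vm);
    //     po::notify(vm); // fills cliOptions via the bound po::value<> pointers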

    InferenceModel(const Params& params, const std::shared_ptr<armnn::IRuntime>& runtime = nullptr)
        : m_EnableProfiling(params.m_EnableProfiling)
    {
        if (runtime)
        {
            m_Runtime = runtime;
        }
        else
        {
            armnn::IRuntime::CreationOptions options;
            options.m_EnableGpuProfiling = m_EnableProfiling;
            m_Runtime = std::move(armnn::IRuntime::Create(options));
        }

        std::string invalidBackends;
        if (!CheckRequestedBackendsAreValid(params.m_ComputeDevice, armnn::Optional<std::string&>(invalidBackends)))
        {
            throw armnn::Exception("Some backend IDs are invalid: " + invalidBackends);
        }

        armnn::INetworkPtr network = CreateNetworkImpl<IParser>::Create(params, m_InputBindingInfo,
                                                                        m_OutputBindingInfo);

        armnn::IOptimizedNetworkPtr optNet{nullptr, [](armnn::IOptimizedNetwork*){}};
        {
            ARMNN_SCOPED_HEAP_PROFILING("Optimizing");

            armnn::OptimizerOptions options;
            options.m_ReduceFp32ToFp16 = params.m_EnableFp16TurboMode;

            optNet = armnn::Optimize(*network, params.m_ComputeDevice, m_Runtime->GetDeviceSpec(), options);
            if (!optNet)
            {
                throw armnn::Exception("Optimize returned nullptr");
            }
        }

        if (params.m_VisualizePostOptimizationModel)
        {
            boost::filesystem::path filename = params.m_ModelPath;
            filename.replace_extension("dot");
            std::fstream file(filename.c_str(), std::fstream::out);
            optNet->SerializeToDot(file);
        }

        armnn::Status ret;
        {
            ARMNN_SCOPED_HEAP_PROFILING("LoadNetwork");
            ret = m_Runtime->LoadNetwork(m_NetworkIdentifier, std::move(optNet));
        }

        if (ret == armnn::Status::Failure)
        {
            throw armnn::Exception("IRuntime::LoadNetwork failed");
        }
    }

    unsigned int GetOutputSize() const
    {
        return m_OutputBindingInfo.second.GetNumElements();
    }

    void Run(const std::vector<TDataType>& input, std::vector<TDataType>& output)
    {
        BOOST_ASSERT(output.size() == GetOutputSize());

        std::shared_ptr<armnn::IProfiler> profiler = m_Runtime->GetProfiler(m_NetworkIdentifier);
        if (profiler)
        {
            profiler->EnableProfiling(m_EnableProfiling);
        }

        armnn::Status ret = m_Runtime->EnqueueWorkload(m_NetworkIdentifier,
                                                       MakeInputTensors(input),
                                                       MakeOutputTensors(output));

        // If profiling is enabled, print out the results.
        if (profiler && profiler->IsProfilingEnabled())
        {
            profiler->Print(std::cout);
        }

        if (ret == armnn::Status::Failure)
        {
            throw armnn::Exception("IRuntime::EnqueueWorkload failed");
        }
    }

    const InferenceModelInternal::BindingPointInfo& GetInputBindingInfo() const
    {
        return m_InputBindingInfo;
    }

    const InferenceModelInternal::BindingPointInfo& GetOutputBindingInfo() const
    {
        return m_OutputBindingInfo;
    }

    InferenceModelInternal::QuantizationParams GetQuantizationParams() const
    {
        return std::make_pair(m_OutputBindingInfo.second.GetQuantizationScale(),
                              m_OutputBindingInfo.second.GetQuantizationOffset());
    }

private:
    armnn::NetworkId m_NetworkIdentifier;
    std::shared_ptr<armnn::IRuntime> m_Runtime;

    InferenceModelInternal::BindingPointInfo m_InputBindingInfo;
    InferenceModelInternal::BindingPointInfo m_OutputBindingInfo;
    bool m_EnableProfiling;

    template<typename TContainer>
    armnn::InputTensors MakeInputTensors(const TContainer& inputTensorData)
    {
        return ::MakeInputTensors(m_InputBindingInfo, inputTensorData);
    }

    template<typename TContainer>
    armnn::OutputTensors MakeOutputTensors(TContainer& outputTensorData)
    {
        return ::MakeOutputTensors(m_OutputBindingInfo, outputTensorData);
    }
};
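
// A minimal end-to-end usage sketch, assuming the TfLite parser is built in and using
// hypothetical model and binding names; a real caller would add command-line handling
// and input data loading around this:
//
//     InferenceModelInternal::Params params;
//     params.m_ModelPath     = "model.tflite";   // hypothetical
//     params.m_InputBinding  = "input";          // hypothetical
//     params.m_OutputBinding = "output";         // hypothetical
//     params.m_ComputeDevice = {armnn::Compute::CpuRef};
//
//     InferenceModel<armnnTfLiteParser::ITfLiteParser, float> model(params);
//     std::vector<float> input(model.GetInputBindingInfo().second.GetNumElements());
//     std::vector<float> output(model.GetOutputSize());
//     model.Run(input, output);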