//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#pragma once

#include "armnn/IRuntime.hpp"
#include "armnnOnnxParser/IOnnxParser.hpp"
#include "test/TensorHelpers.hpp"
#include "VerificationHelpers.hpp"

#include <boost/format.hpp>
#include <map>
#include <string>
#include <utility>
#include <vector>

namespace armnnUtils
{

template<typename TParser>
struct ParserPrototxtFixture
{
    ParserPrototxtFixture()
        : m_Parser(TParser::Create())
        , m_NetworkIdentifier(-1)
    {
        // Always test against the CpuRef reference backend, and additionally against the NEON and
        // OpenCL backends when they have been compiled in.
        armnn::IRuntime::CreationOptions options;
        m_Runtimes.push_back(std::make_pair(armnn::IRuntime::Create(options), armnn::Compute::CpuRef));

#if ARMCOMPUTENEON_ENABLED
        m_Runtimes.push_back(std::make_pair(armnn::IRuntime::Create(options), armnn::Compute::CpuAcc));
#endif

#if ARMCOMPUTECL_ENABLED
        m_Runtimes.push_back(std::make_pair(armnn::IRuntime::Create(options), armnn::Compute::GpuAcc));
#endif
    }

    /// Parses and loads the network defined by the m_Prototext string.
    /// @{
    void SetupSingleInputSingleOutput(const std::string& inputName, const std::string& outputName);
    void SetupSingleInputSingleOutput(const armnn::TensorShape& inputTensorShape,
                                      const std::string& inputName,
                                      const std::string& outputName);
    void Setup(const std::map<std::string, armnn::TensorShape>& inputShapes,
               const std::vector<std::string>& requestedOutputs);
    void Setup();
    /// @}

    /// Executes the network with the given input tensor and checks the result against the given output tensor.
    /// This overload assumes that the network has a single input and a single output.
    template <std::size_t NumOutputDimensions>
    void RunTest(const std::vector<float>& inputData, const std::vector<float>& expectedOutputData);

    /// Executes the network with the given input tensors and checks the results against the given output tensors.
    /// This overload supports multiple inputs and multiple outputs, identified by name.
    template <std::size_t NumOutputDimensions>
    void RunTest(const std::map<std::string, std::vector<float>>& inputData,
                 const std::map<std::string, std::vector<float>>& expectedOutputData);

    std::string m_Prototext;
    std::unique_ptr<TParser, void(*)(TParser* parser)> m_Parser;
    std::vector<std::pair<armnn::IRuntimePtr, armnn::Compute>> m_Runtimes;
    armnn::NetworkId m_NetworkIdentifier;

    /// If one of the SetupSingleInputSingleOutput() overloads is called, these store the input and output
    /// names so that they don't need to be passed to the single-input-single-output overload of RunTest().
    /// @{
    std::string m_SingleInputName;
    std::string m_SingleOutputName;
    /// @}
};

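// Illustrative usage sketch (not part of the fixture itself): a parser test typically derives from
// ParserPrototxtFixture, fills m_Prototext with the textual model description, calls one of the
// Setup*() overloads, and then invokes RunTest(). The fixture name, parser type, tensor names and
// values below are hypothetical examples.
//
//     struct SimpleReluFixture : public armnnUtils::ParserPrototxtFixture<armnnOnnxParser::IOnnxParser>
//     {
//         SimpleReluFixture()
//         {
//             m_Prototext = "..."; // prototxt describing a model with one input and one output
//             SetupSingleInputSingleOutput("Input", "Output");
//         }
//     };
//
//     BOOST_FIXTURE_TEST_CASE(SimpleRelu, SimpleReluFixture)
//     {
//         RunTest<4>({ -1.0f, 0.0f, 2.0f, 3.0f }, { 0.0f, 0.0f, 2.0f, 3.0f });
//     }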

template<typename TParser>
void ParserPrototxtFixture<TParser>::SetupSingleInputSingleOutput(const std::string& inputName,
                                                                  const std::string& outputName)
{
    // Stores the input and output names so they don't need to be passed to the single-input-single-output RunTest().
    m_SingleInputName = inputName;
    m_SingleOutputName = outputName;
    Setup({ }, { outputName });
}

template<typename TParser>
void ParserPrototxtFixture<TParser>::SetupSingleInputSingleOutput(const armnn::TensorShape& inputTensorShape,
                                                                  const std::string& inputName,
                                                                  const std::string& outputName)
{
    // Stores the input and output names so they don't need to be passed to the single-input-single-output RunTest().
    m_SingleInputName = inputName;
    m_SingleOutputName = outputName;
    Setup({ { inputName, inputTensorShape } }, { outputName });
}

template<typename TParser>
void ParserPrototxtFixture<TParser>::Setup(const std::map<std::string, armnn::TensorShape>& inputShapes,
                                           const std::vector<std::string>& requestedOutputs)
{
    // Creates, optimises and loads the network for each configured backend, with CpuRef as the fallback backend.
    for (auto&& runtime : m_Runtimes)
    {
        std::string errorMessage;

        armnn::INetworkPtr network =
            m_Parser->CreateNetworkFromString(m_Prototext.c_str(), inputShapes, requestedOutputs);
        auto optimized = Optimize(*network,
            { runtime.second, armnn::Compute::CpuRef }, runtime.first->GetDeviceSpec());
        armnn::Status ret = runtime.first->LoadNetwork(m_NetworkIdentifier, std::move(optimized), errorMessage);
        if (ret != armnn::Status::Success)
        {
            throw armnn::Exception(boost::str(
                boost::format("LoadNetwork failed with error: '%1%' %2%")
                % errorMessage
                % CHECK_LOCATION().AsString()));
        }
    }
}

template<typename TParser>
void ParserPrototxtFixture<TParser>::Setup()
{
    // As above, but without explicitly specifying input shapes or requested outputs.
    for (auto&& runtime : m_Runtimes)
    {
        std::string errorMessage;

        armnn::INetworkPtr network =
            m_Parser->CreateNetworkFromString(m_Prototext.c_str());
        auto optimized = Optimize(*network,
            { runtime.second, armnn::Compute::CpuRef }, runtime.first->GetDeviceSpec());
        armnn::Status ret = runtime.first->LoadNetwork(m_NetworkIdentifier, std::move(optimized), errorMessage);
        if (ret != armnn::Status::Success)
        {
            throw armnn::Exception(boost::str(
                boost::format("LoadNetwork failed with error: '%1%' %2%")
                % errorMessage
                % CHECK_LOCATION().AsString()));
        }
    }
}

template<typename TParser>
template <std::size_t NumOutputDimensions>
void ParserPrototxtFixture<TParser>::RunTest(const std::vector<float>& inputData,
                                             const std::vector<float>& expectedOutputData)
{
    RunTest<NumOutputDimensions>({ { m_SingleInputName, inputData } }, { { m_SingleOutputName, expectedOutputData } });
}

template<typename TParser>
template <std::size_t NumOutputDimensions>
void ParserPrototxtFixture<TParser>::RunTest(const std::map<std::string, std::vector<float>>& inputData,
                                             const std::map<std::string, std::vector<float>>& expectedOutputData)
{
    for (auto&& runtime : m_Runtimes)
    {
        using BindingPointInfo = std::pair<armnn::LayerBindingId, armnn::TensorInfo>;

        // Sets up the armnn input tensors from the given vectors.
        armnn::InputTensors inputTensors;
        for (auto&& it : inputData)
        {
            BindingPointInfo bindingInfo = m_Parser->GetNetworkInputBindingInfo(it.first);
            inputTensors.push_back({ bindingInfo.first, armnn::ConstTensor(bindingInfo.second, it.second.data()) });
        }

        // Allocates storage for the output tensors to be written to and sets up the armnn output tensors.
        std::map<std::string, boost::multi_array<float, NumOutputDimensions>> outputStorage;
        armnn::OutputTensors outputTensors;
        for (auto&& it : expectedOutputData)
        {
            BindingPointInfo bindingInfo = m_Parser->GetNetworkOutputBindingInfo(it.first);
            outputStorage.emplace(it.first, MakeTensor<float, NumOutputDimensions>(bindingInfo.second));
            outputTensors.push_back(
                { bindingInfo.first, armnn::Tensor(bindingInfo.second, outputStorage.at(it.first).data()) });
        }

        runtime.first->EnqueueWorkload(m_NetworkIdentifier, inputTensors, outputTensors);

        // Compares each output tensor to the expected values.
        for (auto&& it : expectedOutputData)
        {
            BindingPointInfo bindingInfo = m_Parser->GetNetworkOutputBindingInfo(it.first);
            if (bindingInfo.second.GetNumElements() != it.second.size())
            {
                throw armnn::Exception(
                    boost::str(
                        boost::format("Output tensor %1% is expected to have %2% elements, "
                                      "but %3% elements were supplied. %4%") %
                        it.first %
                        bindingInfo.second.GetNumElements() %
                        it.second.size() %
                        CHECK_LOCATION().AsString()));
            }
            auto outputExpected = MakeTensor<float, NumOutputDimensions>(bindingInfo.second, it.second);
            BOOST_TEST(CompareTensors(outputExpected, outputStorage[it.first]));
        }
    }
}
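
// Illustrative call sketch for the multiple-input/multiple-output overload (the tensor names, shapes and
// values are hypothetical and depend on the model described in m_Prototext):
//
//     RunTest<2>({ { "InputA", { 1.0f, 2.0f } }, { "InputB", { 3.0f, 4.0f } } },
//                { { "Output", { 4.0f, 6.0f } } });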

} // namespace armnnUtils