blob: 7ae0742b8e8b5a22a316339f8b2ebfe6a3b245c4 [file] [log] [blame]
telsoa014fcda012018-03-09 14:13:49 +00001//
2// Copyright © 2017 Arm Ltd. All rights reserved.
David Beckecb56cd2018-09-05 12:52:57 +01003// SPDX-License-Identifier: MIT
telsoa014fcda012018-03-09 14:13:49 +00004//
5
6#pragma once
7
Aron Virginas-Tar9c5db112018-10-25 11:10:49 +01008#include <armnn/IRuntime.hpp>
narpra016f37f832018-12-21 18:30:00 +00009
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +000010#include <test/TensorHelpers.hpp>
Aron Virginas-Tar9c5db112018-10-25 11:10:49 +010011
narpra016f37f832018-12-21 18:30:00 +000012#include <Network.hpp>
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +000013#include <VerificationHelpers.hpp>
Aron Virginas-Tar9c5db112018-10-25 11:10:49 +010014
telsoa01c577f2c2018-08-31 09:22:23 +010015#include <boost/format.hpp>
Aron Virginas-Tar9c5db112018-10-25 11:10:49 +010016
Ferran Balaguer51dd62f2019-01-11 19:29:18 +000017#include <iomanip>
telsoa014fcda012018-03-09 14:13:49 +000018#include <string>
19
telsoa01c577f2c2018-08-31 09:22:23 +010020namespace armnnUtils
21{
surmeh013537c2c2018-05-18 16:31:43 +010022
/// Test fixture that parses a network from the in-memory m_Prototext string,
/// loads it into an Arm NN runtime (CpuRef backend) and executes it, comparing
/// the produced output tensors against caller-supplied expected values.
/// TParser supplies the concrete parser (e.g. Caffe/TensorFlow prototxt parser)
/// via TParser::Create().
template<typename TParser>
struct ParserPrototxtFixture
{
    ParserPrototxtFixture()
        : m_Parser(TParser::Create())
        , m_Runtime(armnn::IRuntime::Create(armnn::IRuntime::CreationOptions()))
        , m_NetworkIdentifier(-1)
    {
    }

    /// Parses and loads the network defined by the m_Prototext string.
    /// @{
    void SetupSingleInputSingleOutput(const std::string& inputName, const std::string& outputName);
    void SetupSingleInputSingleOutput(const armnn::TensorShape& inputTensorShape,
                                      const std::string& inputName,
                                      const std::string& outputName);
    void SetupSingleInputSingleOutput(const armnn::TensorShape& inputTensorShape,
                                      const armnn::TensorShape& outputTensorShape,
                                      const std::string& inputName,
                                      const std::string& outputName);
    void Setup(const std::map<std::string, armnn::TensorShape>& inputShapes,
               const std::vector<std::string>& requestedOutputs);
    void Setup();
    armnn::IOptimizedNetworkPtr SetupOptimizedNetwork(
        const std::map<std::string,armnn::TensorShape>& inputShapes,
        const std::vector<std::string>& requestedOutputs);
    /// @}

    /// Executes the network with the given input tensor and checks the result against the given output tensor.
    /// This overload assumes that the network has a single input and a single output.
    template <std::size_t NumOutputDimensions>
    void RunTest(const std::vector<float>& inputData, const std::vector<float>& expectedOutputData);

    /// Executes the network with the given input tensor and checks the result against the given output tensor.
    /// Calls RunTest with output type of uint8_t for checking comparison operators.
    template <std::size_t NumOutputDimensions>
    void RunComparisonTest(const std::map<std::string, std::vector<float>>& inputData,
                           const std::map<std::string, std::vector<uint8_t>>& expectedOutputData);

    /// Executes the network with the given input tensors and checks the results against the given output tensors.
    /// This overload supports multiple inputs and multiple outputs, identified by name.
    template <std::size_t NumOutputDimensions, typename T = float>
    void RunTest(const std::map<std::string, std::vector<float>>& inputData,
                 const std::map<std::string, std::vector<T>>& expectedOutputData);

    std::string m_Prototext;                                     // Network definition consumed by Setup().
    std::unique_ptr<TParser, void(*)(TParser* parser)> m_Parser; // Parser with the custom deleter TParser::Create() supplies.
    armnn::IRuntimePtr m_Runtime;
    armnn::NetworkId m_NetworkIdentifier;                        // Assigned by LoadNetwork() during Setup(); -1 until then.

    /// If the single-input-single-output overload of Setup() is called, these will store the input and output name
    /// so they don't need to be passed to the single-input-single-output overload of RunTest().
    /// @{
    std::string m_SingleInputName;
    std::string m_SingleOutputName;
    /// @}

    /// This will store the output shape so it don't need to be passed to the single-input-single-output overload
    /// of RunTest().
    armnn::TensorShape m_SingleOutputShape;
};
84
85template<typename TParser>
86void ParserPrototxtFixture<TParser>::SetupSingleInputSingleOutput(const std::string& inputName,
87 const std::string& outputName)
88{
telsoa01c577f2c2018-08-31 09:22:23 +010089 // Stores the input and output name so they don't need to be passed to the single-input-single-output RunTest().
telsoa014fcda012018-03-09 14:13:49 +000090 m_SingleInputName = inputName;
91 m_SingleOutputName = outputName;
92 Setup({ }, { outputName });
93}
94
95template<typename TParser>
96void ParserPrototxtFixture<TParser>::SetupSingleInputSingleOutput(const armnn::TensorShape& inputTensorShape,
97 const std::string& inputName,
98 const std::string& outputName)
99{
telsoa01c577f2c2018-08-31 09:22:23 +0100100 // Stores the input and output name so they don't need to be passed to the single-input-single-output RunTest().
telsoa014fcda012018-03-09 14:13:49 +0000101 m_SingleInputName = inputName;
102 m_SingleOutputName = outputName;
103 Setup({ { inputName, inputTensorShape } }, { outputName });
104}
105
106template<typename TParser>
Ferran Balaguer51dd62f2019-01-11 19:29:18 +0000107void ParserPrototxtFixture<TParser>::SetupSingleInputSingleOutput(const armnn::TensorShape& inputTensorShape,
108 const armnn::TensorShape& outputTensorShape,
109 const std::string& inputName,
110 const std::string& outputName)
111{
112 // Stores the input name, the output name and the output tensor shape
113 // so they don't need to be passed to the single-input-single-output RunTest().
114 m_SingleInputName = inputName;
115 m_SingleOutputName = outputName;
116 m_SingleOutputShape = outputTensorShape;
117 Setup({ { inputName, inputTensorShape } }, { outputName });
118}
119
120template<typename TParser>
telsoa014fcda012018-03-09 14:13:49 +0000121void ParserPrototxtFixture<TParser>::Setup(const std::map<std::string, armnn::TensorShape>& inputShapes,
122 const std::vector<std::string>& requestedOutputs)
123{
Aron Virginas-Tar1d67a6902018-11-19 10:58:30 +0000124 std::string errorMessage;
telsoa01c577f2c2018-08-31 09:22:23 +0100125
Aron Virginas-Tar1d67a6902018-11-19 10:58:30 +0000126 armnn::INetworkPtr network =
127 m_Parser->CreateNetworkFromString(m_Prototext.c_str(), inputShapes, requestedOutputs);
128 auto optimized = Optimize(*network, { armnn::Compute::CpuRef }, m_Runtime->GetDeviceSpec());
129 armnn::Status ret = m_Runtime->LoadNetwork(m_NetworkIdentifier, move(optimized), errorMessage);
130 if (ret != armnn::Status::Success)
131 {
132 throw armnn::Exception(boost::str(
133 boost::format("LoadNetwork failed with error: '%1%' %2%")
134 % errorMessage
135 % CHECK_LOCATION().AsString()));
telsoa01c577f2c2018-08-31 09:22:23 +0100136 }
137}
138
139template<typename TParser>
140void ParserPrototxtFixture<TParser>::Setup()
141{
Aron Virginas-Tar1d67a6902018-11-19 10:58:30 +0000142 std::string errorMessage;
telsoa01c577f2c2018-08-31 09:22:23 +0100143
Aron Virginas-Tar1d67a6902018-11-19 10:58:30 +0000144 armnn::INetworkPtr network =
145 m_Parser->CreateNetworkFromString(m_Prototext.c_str());
146 auto optimized = Optimize(*network, { armnn::Compute::CpuRef }, m_Runtime->GetDeviceSpec());
147 armnn::Status ret = m_Runtime->LoadNetwork(m_NetworkIdentifier, move(optimized), errorMessage);
148 if (ret != armnn::Status::Success)
149 {
150 throw armnn::Exception(boost::str(
151 boost::format("LoadNetwork failed with error: '%1%' %2%")
152 % errorMessage
153 % CHECK_LOCATION().AsString()));
telsoa014fcda012018-03-09 14:13:49 +0000154 }
155}
156
157template<typename TParser>
narpra016f37f832018-12-21 18:30:00 +0000158armnn::IOptimizedNetworkPtr ParserPrototxtFixture<TParser>::SetupOptimizedNetwork(
159 const std::map<std::string,armnn::TensorShape>& inputShapes,
160 const std::vector<std::string>& requestedOutputs)
161{
162 armnn::INetworkPtr network =
163 m_Parser->CreateNetworkFromString(m_Prototext.c_str(), inputShapes, requestedOutputs);
164 auto optimized = Optimize(*network, { armnn::Compute::CpuRef }, m_Runtime->GetDeviceSpec());
165 return optimized;
166}
167
168template<typename TParser>
telsoa014fcda012018-03-09 14:13:49 +0000169template <std::size_t NumOutputDimensions>
170void ParserPrototxtFixture<TParser>::RunTest(const std::vector<float>& inputData,
kevmay012b4d88e2019-01-24 14:05:09 +0000171 const std::vector<float>& expectedOutputData)
telsoa014fcda012018-03-09 14:13:49 +0000172{
173 RunTest<NumOutputDimensions>({ { m_SingleInputName, inputData } }, { { m_SingleOutputName, expectedOutputData } });
174}
175
176template<typename TParser>
177template <std::size_t NumOutputDimensions>
kevmay012b4d88e2019-01-24 14:05:09 +0000178void ParserPrototxtFixture<TParser>::RunComparisonTest(const std::map<std::string, std::vector<float>>& inputData,
179 const std::map<std::string, std::vector<uint8_t>>&
180 expectedOutputData)
181{
182 RunTest<NumOutputDimensions, uint8_t>(inputData, expectedOutputData);
183}
184
/// Runs the loaded network with the given named inputs and compares every named
/// output against the expected data, throwing armnn::Exception on any
/// element-count or (when m_SingleOutputShape is set) shape mismatch.
template<typename TParser>
template <std::size_t NumOutputDimensions, typename T>
void ParserPrototxtFixture<TParser>::RunTest(const std::map<std::string, std::vector<float>>& inputData,
                                             const std::map<std::string, std::vector<T>>& expectedOutputData)
{
    using BindingPointInfo = std::pair<armnn::LayerBindingId, armnn::TensorInfo>;

    // Sets up the armnn input tensors from the given vectors.
    armnn::InputTensors inputTensors;
    for (auto&& it : inputData)
    {
        BindingPointInfo bindingInfo = m_Parser->GetNetworkInputBindingInfo(it.first);
        inputTensors.push_back({ bindingInfo.first, armnn::ConstTensor(bindingInfo.second, it.second.data()) });
    }

    // Allocates storage for the output tensors to be written to and sets up the armnn output tensors.
    // NOTE: outputTensors holds raw pointers into outputStorage, so outputStorage
    // must stay alive (and un-rehashed) until after EnqueueWorkload() below.
    std::map<std::string, boost::multi_array<T, NumOutputDimensions>> outputStorage;
    armnn::OutputTensors outputTensors;
    for (auto&& it : expectedOutputData)
    {
        BindingPointInfo bindingInfo = m_Parser->GetNetworkOutputBindingInfo(it.first);
        outputStorage.emplace(it.first, MakeTensor<T, NumOutputDimensions>(bindingInfo.second));
        outputTensors.push_back(
            { bindingInfo.first, armnn::Tensor(bindingInfo.second, outputStorage.at(it.first).data()) });
    }

    m_Runtime->EnqueueWorkload(m_NetworkIdentifier, inputTensors, outputTensors);

    // Compares each output tensor to the expected values.
    for (auto&& it : expectedOutputData)
    {
        BindingPointInfo bindingInfo = m_Parser->GetNetworkOutputBindingInfo(it.first);
        // A mismatch between the network's output element count and the
        // supplied expected data is a test-authoring error, not a test failure.
        if (bindingInfo.second.GetNumElements() != it.second.size())
        {
            throw armnn::Exception(
                boost::str(boost::format("Output tensor %1% is expected to have %2% elements. "
                                         "%3% elements supplied. %4%") %
                                         it.first %
                                         bindingInfo.second.GetNumElements() %
                                         it.second.size() %
                                         CHECK_LOCATION().AsString()));
        }

        // If the expected output shape is set, the output tensor checks will be carried out.
        // (m_SingleOutputShape has zero dimensions unless the four-argument
        // SetupSingleInputSingleOutput() overload stored one.)
        if (m_SingleOutputShape.GetNumDimensions() != 0)
        {

            if (bindingInfo.second.GetShape().GetNumDimensions() == NumOutputDimensions &&
                bindingInfo.second.GetShape().GetNumDimensions() == m_SingleOutputShape.GetNumDimensions())
            {
                // Dimension counts agree; verify each dimension's extent.
                for (unsigned int i = 0; i < m_SingleOutputShape.GetNumDimensions(); ++i)
                {
                    if (m_SingleOutputShape[i] != bindingInfo.second.GetShape()[i])
                    {
                        throw armnn::Exception(
                            boost::str(boost::format("Output tensor %1% is expected to have %2% shape. "
                                                     "%3% shape supplied. %4%") %
                                                     it.first %
                                                     bindingInfo.second.GetShape() %
                                                     m_SingleOutputShape %
                                                     CHECK_LOCATION().AsString()));
                    }
                }
            }
            else
            {
                throw armnn::Exception(
                    boost::str(boost::format("Output tensor %1% is expected to have %2% dimensions. "
                                             "%3% dimensions supplied. %4%") %
                                             it.first %
                                             bindingInfo.second.GetShape().GetNumDimensions() %
                                             NumOutputDimensions %
                                             CHECK_LOCATION().AsString()));
            }
        }

        auto outputExpected = MakeTensor<T, NumOutputDimensions>(bindingInfo.second, it.second);
        // uint8_t outputs come from comparison operators; compare with the
        // boolean-mode flag enabled.
        if (std::is_same<T, uint8_t>::value)
        {
            BOOST_TEST(CompareTensors(outputExpected, outputStorage[it.first], true));
        }
        else
        {
            BOOST_TEST(CompareTensors(outputExpected, outputStorage[it.first]));
        }
    }
}
telsoa01c577f2c2018-08-31 09:22:23 +0100272
273} // namespace armnnUtils