blob: 97bec5164566624fcb483e1c11df3283cfc9c5b8 [file] [log] [blame]
Aron Virginas-Tar70104002018-10-24 15:33:28 +01001//
2// Copyright © 2017 Arm Ltd. All rights reserved.
3// SPDX-License-Identifier: MIT
4//
5
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +00006#include <backendsCommon/test/EndToEndTestImpl.hpp>
narpra01b9546cf2018-11-20 15:21:28 +00007#include <backendsCommon/test/MergerTestImpl.hpp>
Aron Virginas-Tar70104002018-10-24 15:33:28 +01008
9#include <boost/test/unit_test.hpp>
10
BOOST_AUTO_TEST_SUITE(RefEndToEnd)

// Every test in this suite targets the CPU reference backend only.
std::vector<armnn::BackendId> defaultBackends = {armnn::Compute::CpuRef};
14
BOOST_AUTO_TEST_CASE(ConstantUsage_Ref_Float32)
{
    // End-to-end check that a network using a Float32 constant layer runs on the reference backend.
    BOOST_TEST(ConstantUsageFloat32Test(defaultBackends));
}
19
BOOST_AUTO_TEST_CASE(ConstantUsage_Ref_Uint8)
{
    // End-to-end check that a network using a quantised Uint8 constant layer runs on the reference backend.
    BOOST_TEST(ConstantUsageUint8Test(defaultBackends));
}
24
25BOOST_AUTO_TEST_CASE(Unsigned8)
26{
27 using namespace armnn;
28
29 // Create runtime in which test will run
30 armnn::IRuntime::CreationOptions options;
31 armnn::IRuntimePtr runtime(armnn::IRuntime::Create(options));
32
33 // Builds up the structure of the network.
34 armnn::INetworkPtr net(INetwork::Create());
35
36 IConnectableLayer* input = net->AddInputLayer(0, "input");
37 IConnectableLayer* softmax = net->AddSoftmaxLayer(SoftmaxDescriptor(), "softmax");
38 IConnectableLayer* output = net->AddOutputLayer(0, "output");
39
40 input->GetOutputSlot(0).Connect(softmax->GetInputSlot(0));
41 softmax->GetOutputSlot(0).Connect(output->GetInputSlot(0));
42
43 // Sets the tensors in the network.
44 TensorInfo inputTensorInfo(TensorShape({1, 5}), DataType::QuantisedAsymm8);
45 inputTensorInfo.SetQuantizationOffset(100);
46 inputTensorInfo.SetQuantizationScale(10000.0f);
47 input->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
48
49 TensorInfo outputTensorInfo(TensorShape({1, 5}), DataType::QuantisedAsymm8);
50 outputTensorInfo.SetQuantizationOffset(0);
51 outputTensorInfo.SetQuantizationScale(1.0f/255.0f);
52 softmax->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
53
54 // optimize the network
narpra01b9546cf2018-11-20 15:21:28 +000055 IOptimizedNetworkPtr optNet = Optimize(*net, defaultBackends, runtime->GetDeviceSpec());
Aron Virginas-Tar70104002018-10-24 15:33:28 +010056
57 // Loads it into the runtime.
58 NetworkId netId;
59 auto error = runtime->LoadNetwork(netId, std::move(optNet));
60 BOOST_TEST(error == Status::Success);
61
62 // Creates structures for input & output.
63 std::vector<uint8_t> inputData
64 {
65 1, 10, 3, 200, 5 // Some inputs - one of which is sufficiently larger than the others to saturate softmax.
66 };
67 std::vector<uint8_t> outputData(5);
68
69 armnn::InputTensors inputTensors
70 {
71 {0, armnn::ConstTensor(runtime->GetInputTensorInfo(netId, 0), inputData.data())}
72 };
73 armnn::OutputTensors outputTensors
74 {
75 {0, armnn::Tensor(runtime->GetOutputTensorInfo(netId, 0), outputData.data())}
76 };
77
78 // Does the inference.
79 runtime->EnqueueWorkload(netId, inputTensors, outputTensors);
80
81 // Checks the results.
82 BOOST_TEST(outputData[0] == 0);
83 BOOST_TEST(outputData[1] == 0);
84 BOOST_TEST(outputData[2] == 0);
85 BOOST_TEST(outputData[3] == 255); // softmax has been saturated.
86 BOOST_TEST(outputData[4] == 0);
87}
88
89BOOST_AUTO_TEST_CASE(TrivialAdd)
90{
91 // This test was designed to match "AddTwo" in android nn/runtime/test/TestTrivialModel.cpp.
92
93 using namespace armnn;
94
95 // Create runtime in which test will run
96 armnn::IRuntime::CreationOptions options;
97 armnn::IRuntimePtr runtime(armnn::IRuntime::Create(options));
98
99 // Builds up the structure of the network.
100 armnn::INetworkPtr net(INetwork::Create());
101
102 IConnectableLayer* input1 = net->AddInputLayer(0);
103 IConnectableLayer* input2 = net->AddInputLayer(1);
104 IConnectableLayer* add = net->AddAdditionLayer();
105 IConnectableLayer* output = net->AddOutputLayer(0);
106
107 input1->GetOutputSlot(0).Connect(add->GetInputSlot(0));
108 input2->GetOutputSlot(0).Connect(add->GetInputSlot(1));
109 add->GetOutputSlot(0).Connect(output->GetInputSlot(0));
110
111 // Sets the tensors in the network.
112 TensorInfo tensorInfo(TensorShape({3, 4}), DataType::Float32);
113 input1->GetOutputSlot(0).SetTensorInfo(tensorInfo);
114 input2->GetOutputSlot(0).SetTensorInfo(tensorInfo);
115 add->GetOutputSlot(0).SetTensorInfo(tensorInfo);
116
117 // optimize the network
narpra01b9546cf2018-11-20 15:21:28 +0000118 IOptimizedNetworkPtr optNet = Optimize(*net, defaultBackends, runtime->GetDeviceSpec());
Aron Virginas-Tar70104002018-10-24 15:33:28 +0100119
120 // Loads it into the runtime.
121 NetworkId netId;
122 runtime->LoadNetwork(netId, std::move(optNet));
123
124 // Creates structures for input & output - matching android nn test.
125 std::vector<float> input1Data
126 {
127 1.f, 2.f, 3.f, 4.f, 5.f, 6.f, 7.f, 8.f, 9.f, 10.f, 11.f, 12.f
128 };
129 std::vector<float> input2Data
130 {
131 100.f, 200.f, 300.f, 400.f, 500.f, 600.f, 700.f, 800.f, 900.f, 1000.f, 1100.f, 1200.f
132 };
133 std::vector<float> outputData(12);
134
135 InputTensors inputTensors
136 {
137 {0,armnn::ConstTensor(runtime->GetInputTensorInfo(netId, 0), input1Data.data())},
138 {1,armnn::ConstTensor(runtime->GetInputTensorInfo(netId, 0), input2Data.data())}
139 };
140 OutputTensors outputTensors
141 {
142 {0,armnn::Tensor(runtime->GetOutputTensorInfo(netId, 0), outputData.data())}
143 };
144
145 // Does the inference.
146 runtime->EnqueueWorkload(netId, inputTensors, outputTensors);
147
148 // Checks the results
149 BOOST_TEST(outputData[0] == 101);
150 BOOST_TEST(outputData[1] == 202);
151 BOOST_TEST(outputData[2] == 303);
152 BOOST_TEST(outputData[3] == 404);
153 BOOST_TEST(outputData[4] == 505);
154 BOOST_TEST(outputData[5] == 606);
155 BOOST_TEST(outputData[6] == 707);
156 BOOST_TEST(outputData[7] == 808);
157 BOOST_TEST(outputData[8] == 909);
158 BOOST_TEST(outputData[9] == 1010);
159 BOOST_TEST(outputData[10] == 1111);
160 BOOST_TEST(outputData[11] == 1212);
161}
162
163BOOST_AUTO_TEST_CASE(MultipleOutputs)
164{
165 using namespace armnn;
166
167 // Create runtime in which test will run
168 armnn::IRuntime::CreationOptions options;
169 armnn::IRuntimePtr runtime(armnn::IRuntime::Create(options));
170
171 // Builds up the structure of the network.
172 INetworkPtr net(INetwork::Create());
173
174 IConnectableLayer* input = net->AddInputLayer(0);
175
176 // ReLu1
177 ActivationDescriptor activation1Descriptor;
178 activation1Descriptor.m_Function = ActivationFunction::BoundedReLu;
179 activation1Descriptor.m_A = 1.f;
180 activation1Descriptor.m_B = -1.f;
181 IConnectableLayer* activation1 = net->AddActivationLayer(activation1Descriptor);
182
183 // ReLu6
184 ActivationDescriptor activation2Descriptor;
185 activation2Descriptor.m_Function = ActivationFunction::BoundedReLu;
186 activation2Descriptor.m_A = 6.0f;
187 IConnectableLayer* activation2 = net->AddActivationLayer(activation2Descriptor);
188
189 // BoundedReLu(min=2, max=5)
190 ActivationDescriptor activation3Descriptor;
191 activation3Descriptor.m_Function = ActivationFunction::BoundedReLu;
192 activation3Descriptor.m_A = 5.0f;
193 activation3Descriptor.m_B = 2.0f;
194 IConnectableLayer* activation3 = net->AddActivationLayer(activation3Descriptor);
195
196 IConnectableLayer* output1 = net->AddOutputLayer(0);
197 IConnectableLayer* output2 = net->AddOutputLayer(1);
198 IConnectableLayer* output3 = net->AddOutputLayer(2);
199
200 input->GetOutputSlot(0).Connect(activation1->GetInputSlot(0));
201 input->GetOutputSlot(0).Connect(activation2->GetInputSlot(0));
202 input->GetOutputSlot(0).Connect(activation3->GetInputSlot(0));
203
204 activation1->GetOutputSlot(0).Connect(output1->GetInputSlot(0));
205 activation2->GetOutputSlot(0).Connect(output2->GetInputSlot(0));
206 activation3->GetOutputSlot(0).Connect(output3->GetInputSlot(0));
207
208 // Sets the tensors in the network.
209 TensorInfo tensorInfo(TensorShape({ 10 }), DataType::Float32);
210 input->GetOutputSlot(0).SetTensorInfo(tensorInfo);
211 activation1->GetOutputSlot(0).SetTensorInfo(tensorInfo);
212 activation2->GetOutputSlot(0).SetTensorInfo(tensorInfo);
213 activation3->GetOutputSlot(0).SetTensorInfo(tensorInfo);
214
215 // optimize the network
narpra01b9546cf2018-11-20 15:21:28 +0000216 IOptimizedNetworkPtr optNet = Optimize(*net, defaultBackends, runtime->GetDeviceSpec());
Aron Virginas-Tar70104002018-10-24 15:33:28 +0100217
218 // Loads it into the runtime.
219 NetworkId netId;
220 runtime->LoadNetwork(netId, std::move(optNet));
221
222 // Creates structures for input & output.
223 const std::vector<float> inputData{ 3.f, 5.f, 2.f, 3.f, 7.f, 0.f, -2.f, -1.f, 3.f, 3.f };
224
225 std::vector<float> output1Data(inputData.size());
226 std::vector<float> output2Data(inputData.size());
227 std::vector<float> output3Data(inputData.size());
228
229 InputTensors inputTensors
230 {
231 {0,armnn::ConstTensor(runtime->GetInputTensorInfo(netId, 0), inputData.data())}
232 };
233 OutputTensors outputTensors
234 {
235 {0,armnn::Tensor(runtime->GetOutputTensorInfo(netId, 0), output1Data.data())},
236 {1,armnn::Tensor(runtime->GetOutputTensorInfo(netId, 1), output2Data.data())},
237 {2,armnn::Tensor(runtime->GetOutputTensorInfo(netId, 2), output3Data.data())}
238 };
239
240 // Does the inference.
241 runtime->EnqueueWorkload(netId, inputTensors, outputTensors);
242
243 // Checks the results.
244 BOOST_TEST(output1Data == std::vector<float>({ 1.f, 1.f, 1.f, 1.f, 1.f, 0.f, -1.f, -1.f, 1.f, 1.f })); // ReLu1
245 BOOST_TEST(output2Data == std::vector<float>({ 3.f, 5.f, 2.f, 3.f, 6.f, 0.f, 0.f, 0.f, 3.f, 3.f })); // ReLu6
246 BOOST_TEST(output3Data == std::vector<float>({ 3.f, 5.f, 2.f, 3.f, 5.f, 2.f, 2.f, 2.f, 3.f, 3.f })); // [2, 5]
247}
248
// Merger (concatenation) end-to-end tests: one Float32 and one Uint8 case per
// concatenation dimension (0-3), all delegating to the templated helpers in
// MergerTestImpl.hpp and running on the reference backend.
BOOST_AUTO_TEST_CASE(RefMergerEndToEndDim0Test)
{
    MergerDim0EndToEnd<float>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefMergerEndToEndDim0Uint8Test)
{
    MergerDim0EndToEnd<uint8_t>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefMergerEndToEndDim1Test)
{
    MergerDim1EndToEnd<float>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefMergerEndToEndDim1Uint8Test)
{
    MergerDim1EndToEnd<uint8_t>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefMergerEndToEndDim2Test)
{
    MergerDim2EndToEnd<float>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefMergerEndToEndDim2Uint8Test)
{
    MergerDim2EndToEnd<uint8_t>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefMergerEndToEndDim3Test)
{
    MergerDim3EndToEnd<float>(defaultBackends);
}

BOOST_AUTO_TEST_CASE(RefMergerEndToEndDim3Uint8Test)
{
    MergerDim3EndToEnd<uint8_t>(defaultBackends);
}

BOOST_AUTO_TEST_SUITE_END()