//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include <test/CreateWorkload.hpp>

#include <reference/RefTensorHandle.hpp>
#include <reference/RefWorkloadFactory.hpp>
#include <reference/workloads/RefWorkloads.hpp>

namespace
{

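// Checks that a workload's first input and output tensor handles hold the expected TensorInfo.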
template<typename Workload>
void CheckInputOutput(std::unique_ptr<Workload> workload, const TensorInfo& inputInfo, const TensorInfo& outputInfo)
{
    auto queueDescriptor = workload->GetData();
    auto inputHandle  = boost::polymorphic_downcast<RefTensorHandle*>(queueDescriptor.m_Inputs[0]);
    auto outputHandle = boost::polymorphic_downcast<RefTensorHandle*>(queueDescriptor.m_Outputs[0]);
    BOOST_TEST((inputHandle->GetTensorInfo() == inputInfo));
    BOOST_TEST((outputHandle->GetTensorInfo() == outputInfo));
}

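// Two-input variant of CheckInputOutput, used by the elementwise and concat workload tests.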
template <typename Workload>
void CheckInputsOutput(std::unique_ptr<Workload> workload,
                       const TensorInfo& inputInfo0,
                       const TensorInfo& inputInfo1,
                       const TensorInfo& outputInfo)
{
    auto queueDescriptor = workload->GetData();
    auto inputHandle0 = boost::polymorphic_downcast<RefTensorHandle*>(queueDescriptor.m_Inputs[0]);
    auto inputHandle1 = boost::polymorphic_downcast<RefTensorHandle*>(queueDescriptor.m_Inputs[1]);
    auto outputHandle = boost::polymorphic_downcast<RefTensorHandle*>(queueDescriptor.m_Outputs[0]);
    BOOST_TEST((inputHandle0->GetTensorInfo() == inputInfo0));
    BOOST_TEST((inputHandle1->GetTensorInfo() == inputInfo1));
    BOOST_TEST((outputHandle->GetTensorInfo() == outputInfo));
}
}

BOOST_AUTO_TEST_SUITE(CreateWorkloadRef)

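// Each test below builds a minimal graph for a single layer, asks the RefWorkloadFactory to create the
// corresponding reference workload (via the Create*WorkloadTest helpers in test/CreateWorkload.hpp), and then
// verifies the TensorInfo of the RefTensorHandles attached to the workload's queue descriptor.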
template <typename ActivationWorkloadType, armnn::DataType DataType>
static void RefCreateActivationWorkloadTest()
{
    Graph graph;
    RefWorkloadFactory factory;
    auto workload = CreateActivationWorkloadTest<ActivationWorkloadType, DataType>(factory, graph);

    // Checks that outputs are as we expect them (see definition of CreateActivationWorkloadTest).
    CheckInputOutput(std::move(workload),
                     TensorInfo({ 1, 1 }, DataType),
                     TensorInfo({ 1, 1 }, DataType));
}

BOOST_AUTO_TEST_CASE(CreateActivationFloat32Workload)
{
    RefCreateActivationWorkloadTest<RefActivationWorkload, armnn::DataType::Float32>();
}

BOOST_AUTO_TEST_CASE(CreateActivationUint8Workload)
{
    RefCreateActivationWorkloadTest<RefActivationWorkload, armnn::DataType::QuantisedAsymm8>();
}

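// Shared helper for the Addition, Subtraction, Multiplication and Division workload tests below.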
template <typename WorkloadType,
          typename DescriptorType,
          typename LayerType,
          armnn::DataType DataType>
static void RefCreateElementwiseWorkloadTest()
{
    Graph graph;
    RefWorkloadFactory factory;
    auto workload = CreateElementwiseWorkloadTest<WorkloadType, DescriptorType, LayerType, DataType>(
        factory, graph);

    CheckInputsOutput(std::move(workload),
                      TensorInfo({ 2, 3 }, DataType),
                      TensorInfo({ 2, 3 }, DataType),
                      TensorInfo({ 2, 3 }, DataType));
}

BOOST_AUTO_TEST_CASE(CreateAdditionFloatWorkload)
{
    RefCreateElementwiseWorkloadTest<RefAdditionWorkload,
                                     AdditionQueueDescriptor,
                                     AdditionLayer,
                                     armnn::DataType::Float32>();
}

BOOST_AUTO_TEST_CASE(CreateAdditionUint8Workload)
{
    RefCreateElementwiseWorkloadTest<RefAdditionWorkload,
                                     AdditionQueueDescriptor,
                                     AdditionLayer,
                                     armnn::DataType::QuantisedAsymm8>();
}

BOOST_AUTO_TEST_CASE(CreateAdditionInt16Workload)
{
    RefCreateElementwiseWorkloadTest<RefAdditionWorkload,
                                     AdditionQueueDescriptor,
                                     AdditionLayer,
                                     armnn::DataType::QuantisedSymm16>();
}

BOOST_AUTO_TEST_CASE(CreateSubtractionFloatWorkload)
{
    RefCreateElementwiseWorkloadTest<RefSubtractionWorkload,
                                     SubtractionQueueDescriptor,
                                     SubtractionLayer,
                                     armnn::DataType::Float32>();
}

BOOST_AUTO_TEST_CASE(CreateSubtractionUint8Workload)
{
    RefCreateElementwiseWorkloadTest<RefSubtractionWorkload,
                                     SubtractionQueueDescriptor,
                                     SubtractionLayer,
                                     armnn::DataType::QuantisedAsymm8>();
}

BOOST_AUTO_TEST_CASE(CreateSubtractionInt16Workload)
{
    RefCreateElementwiseWorkloadTest<RefSubtractionWorkload,
                                     SubtractionQueueDescriptor,
                                     SubtractionLayer,
                                     armnn::DataType::QuantisedSymm16>();
}

BOOST_AUTO_TEST_CASE(CreateMultiplicationFloatWorkload)
{
    RefCreateElementwiseWorkloadTest<RefMultiplicationWorkload,
                                     MultiplicationQueueDescriptor,
                                     MultiplicationLayer,
                                     armnn::DataType::Float32>();
}

BOOST_AUTO_TEST_CASE(CreateMultiplicationUint8Workload)
{
    RefCreateElementwiseWorkloadTest<RefMultiplicationWorkload,
                                     MultiplicationQueueDescriptor,
                                     MultiplicationLayer,
                                     armnn::DataType::QuantisedAsymm8>();
}

BOOST_AUTO_TEST_CASE(CreateMultiplicationInt16Workload)
{
    RefCreateElementwiseWorkloadTest<RefMultiplicationWorkload,
                                     MultiplicationQueueDescriptor,
                                     MultiplicationLayer,
                                     armnn::DataType::QuantisedSymm16>();
}

BOOST_AUTO_TEST_CASE(CreateDivisionFloatWorkload)
{
    RefCreateElementwiseWorkloadTest<RefDivisionWorkload,
                                     DivisionQueueDescriptor,
                                     DivisionLayer,
                                     armnn::DataType::Float32>();
}

BOOST_AUTO_TEST_CASE(CreateDivisionUint8Workload)
{
    RefCreateElementwiseWorkloadTest<RefDivisionWorkload,
                                     DivisionQueueDescriptor,
                                     DivisionLayer,
                                     armnn::DataType::QuantisedAsymm8>();
}

BOOST_AUTO_TEST_CASE(CreateDivisionInt16Workload)
{
    RefCreateElementwiseWorkloadTest<RefDivisionWorkload,
                                     DivisionQueueDescriptor,
                                     DivisionLayer,
                                     armnn::DataType::QuantisedSymm16>();
}

template <typename BatchNormalizationWorkloadType, armnn::DataType DataType>
static void RefCreateBatchNormalizationWorkloadTest(DataLayout dataLayout)
{
    Graph graph;
    RefWorkloadFactory factory;
    auto workload = CreateBatchNormalizationWorkloadTest<BatchNormalizationWorkloadType, DataType>(factory,
                                                                                                    graph,
                                                                                                    dataLayout);

    TensorShape inputShape;
    TensorShape outputShape;

    switch (dataLayout)
    {
        case DataLayout::NHWC:
            inputShape  = { 2, 4, 4, 3 };
            outputShape = { 2, 4, 4, 3 };
            break;
        case DataLayout::NCHW:
        default:
            inputShape  = { 2, 3, 4, 4 };
            outputShape = { 2, 3, 4, 4 };
            break;
    }

    // Checks that outputs and inputs are as we expect them (see definition of CreateBatchNormalizationWorkloadTest).
    CheckInputOutput(std::move(workload), TensorInfo(inputShape, DataType), TensorInfo(outputShape, DataType));
}

BOOST_AUTO_TEST_CASE(CreateBatchNormalizationFloat32Workload)
{
    RefCreateBatchNormalizationWorkloadTest<RefBatchNormalizationWorkload, armnn::DataType::Float32>
        (DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(CreateBatchNormalizationFloat32WorkloadNhwc)
{
    RefCreateBatchNormalizationWorkloadTest<RefBatchNormalizationWorkload, armnn::DataType::Float32>
        (DataLayout::NHWC);
}

BOOST_AUTO_TEST_CASE(CreateBatchNormalizationUint8Workload)
{
    RefCreateBatchNormalizationWorkloadTest<RefBatchNormalizationWorkload, armnn::DataType::QuantisedAsymm8>
        (DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(CreateBatchNormalizationUint8WorkloadNhwc)
{
    RefCreateBatchNormalizationWorkloadTest<RefBatchNormalizationWorkload, armnn::DataType::QuantisedAsymm8>
        (DataLayout::NHWC);
}

BOOST_AUTO_TEST_CASE(CreateBatchNormalizationInt16Workload)
{
    RefCreateBatchNormalizationWorkloadTest<RefBatchNormalizationWorkload, armnn::DataType::QuantisedSymm16>
        (DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(CreateBatchNormalizationInt16WorkloadNhwc)
{
    RefCreateBatchNormalizationWorkloadTest<RefBatchNormalizationWorkload, armnn::DataType::QuantisedSymm16>
        (DataLayout::NHWC);
}

BOOST_AUTO_TEST_CASE(CreateConvertFp16ToFp32Float32Workload)
{
    Graph graph;
    RefWorkloadFactory factory;
    auto workload = CreateConvertFp16ToFp32WorkloadTest<RefConvertFp16ToFp32Workload>(factory, graph);

    // Checks that outputs and inputs are as we expect them.
    CheckInputOutput(
        std::move(workload), TensorInfo({1, 3, 2, 3}, DataType::Float16), TensorInfo({1, 3, 2, 3}, DataType::Float32));
}

BOOST_AUTO_TEST_CASE(CreateConvertFp32ToFp16Float16Workload)
{
    Graph graph;
    RefWorkloadFactory factory;
    auto workload = CreateConvertFp32ToFp16WorkloadTest<RefConvertFp32ToFp16Workload>(factory, graph);

    // Checks that outputs and inputs are as we expect them.
    CheckInputOutput(
        std::move(workload), TensorInfo({1, 3, 2, 3}, DataType::Float32), TensorInfo({1, 3, 2, 3}, DataType::Float16));
}

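// Note: the Convolution2d and DepthwiseConvolution2d tests below run in Float32 only; dataLayout selects
// between NCHW and NHWC tensor shapes.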
static void RefCreateConvolution2dWorkloadTest(DataLayout dataLayout = DataLayout::NCHW)
{
    Graph graph;
    RefWorkloadFactory factory;
    auto workload = CreateConvolution2dWorkloadTest<RefConvolution2dWorkload, DataType::Float32>
                    (factory, graph, dataLayout);

    TensorShape inputShape  = (dataLayout == DataLayout::NCHW) ? std::initializer_list<unsigned int>({2, 3, 8, 16})
                                                               : std::initializer_list<unsigned int>({2, 8, 16, 3});
    TensorShape outputShape = (dataLayout == DataLayout::NCHW) ? std::initializer_list<unsigned int>({2, 2, 2, 10})
                                                               : std::initializer_list<unsigned int>({2, 2, 10, 2});

    // Checks that outputs and inputs are as we expect them (see definition of CreateConvolution2dWorkloadTest).
    CheckInputOutput(std::move(workload),
                     TensorInfo(inputShape, DataType::Float32),
                     TensorInfo(outputShape, DataType::Float32));
}

BOOST_AUTO_TEST_CASE(CreateConvolution2dFloatNchwWorkload)
{
    RefCreateConvolution2dWorkloadTest(DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(CreateConvolution2dFloatNhwcWorkload)
{
    RefCreateConvolution2dWorkloadTest(DataLayout::NHWC);
}

static void RefCreateDepthwiseConvolutionWorkloadTest(DataLayout dataLayout)
{
    Graph graph;
    RefWorkloadFactory factory;
    auto workload = CreateDepthwiseConvolution2dWorkloadTest<RefDepthwiseConvolution2dWorkload, DataType::Float32>
                    (factory, graph, dataLayout);

    TensorShape inputShape  = (dataLayout == DataLayout::NCHW) ? std::initializer_list<unsigned int>({ 2, 2, 5, 5 })
                                                               : std::initializer_list<unsigned int>({ 2, 5, 5, 2 });
    TensorShape outputShape = (dataLayout == DataLayout::NCHW) ? std::initializer_list<unsigned int>({ 2, 2, 5, 5 })
                                                               : std::initializer_list<unsigned int>({ 2, 5, 5, 2 });

    // Checks that inputs/outputs are as we expect them (see definition of CreateDepthwiseConvolution2dWorkloadTest).
    CheckInputOutput(std::move(workload),
                     TensorInfo(inputShape, DataType::Float32),
                     TensorInfo(outputShape, DataType::Float32));
}

BOOST_AUTO_TEST_CASE(CreateDepthwiseConvolutionFloat32NhwcWorkload)
{
    RefCreateDepthwiseConvolutionWorkloadTest(DataLayout::NHWC);
}

template <typename FullyConnectedWorkloadType, armnn::DataType DataType>
static void RefCreateFullyConnectedWorkloadTest()
{
    Graph graph;
    RefWorkloadFactory factory;
    auto workload = CreateFullyConnectedWorkloadTest<FullyConnectedWorkloadType, DataType>(factory, graph);

    // Checks that outputs and inputs are as we expect them (see definition of CreateFullyConnectedWorkloadTest).
    float inputsQScale = DataType == armnn::DataType::QuantisedAsymm8 ? 1.0f : 0.0f;
    float outputQScale = DataType == armnn::DataType::QuantisedAsymm8 ? 2.0f : 0.0f;
    CheckInputOutput(std::move(workload),
                     TensorInfo({ 3, 1, 4, 5 }, DataType, inputsQScale),
                     TensorInfo({ 3, 7 }, DataType, outputQScale));
}

BOOST_AUTO_TEST_CASE(CreateFullyConnectedWorkloadFloat32)
{
    RefCreateFullyConnectedWorkloadTest<RefFullyConnectedWorkload, armnn::DataType::Float32>();
}

BOOST_AUTO_TEST_CASE(CreateFullyConnectedWorkloadQuantisedAsymm8)
{
    RefCreateFullyConnectedWorkloadTest<RefFullyConnectedWorkload, armnn::DataType::QuantisedAsymm8>();
}

BOOST_AUTO_TEST_CASE(CreateFullyConnectedWorkloadQuantisedSymm16)
{
    RefCreateFullyConnectedWorkloadTest<RefFullyConnectedWorkload, armnn::DataType::QuantisedSymm16>();
}

template <typename NormalizationWorkloadType, armnn::DataType DataType>
static void RefCreateNormalizationWorkloadTest(DataLayout dataLayout)
{
    Graph graph;
    RefWorkloadFactory factory;
    auto workload = CreateNormalizationWorkloadTest<NormalizationWorkloadType, DataType>(factory, graph, dataLayout);

    TensorShape inputShape;
    TensorShape outputShape;

    switch (dataLayout)
    {
        case DataLayout::NHWC:
            inputShape  = { 3, 1, 5, 5 };
            outputShape = { 3, 1, 5, 5 };
            break;
        case DataLayout::NCHW:
        default:
            inputShape  = { 3, 5, 5, 1 };
            outputShape = { 3, 5, 5, 1 };
            break;
    }

    // Checks that outputs and inputs are as we expect them (see definition of CreateNormalizationWorkloadTest).
    CheckInputOutput(std::move(workload), TensorInfo(inputShape, DataType), TensorInfo(outputShape, DataType));
}

BOOST_AUTO_TEST_CASE(CreateRefNormalizationFloat32NchwWorkload)
{
    RefCreateNormalizationWorkloadTest<RefNormalizationWorkload, armnn::DataType::Float32>(DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(CreateRefNormalizationFloat32NhwcWorkload)
{
    RefCreateNormalizationWorkloadTest<RefNormalizationWorkload, armnn::DataType::Float32>(DataLayout::NHWC);
}

BOOST_AUTO_TEST_CASE(CreateRefNormalizationUint8NchwWorkload)
{
    RefCreateNormalizationWorkloadTest<RefNormalizationWorkload, armnn::DataType::QuantisedAsymm8>(DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(CreateRefNormalizationUint8NhwcWorkload)
{
    RefCreateNormalizationWorkloadTest<RefNormalizationWorkload, armnn::DataType::QuantisedAsymm8>(DataLayout::NHWC);
}

BOOST_AUTO_TEST_CASE(CreateRefNormalizationInt16NchwWorkload)
{
    RefCreateNormalizationWorkloadTest<RefNormalizationWorkload, armnn::DataType::QuantisedSymm16>(DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(CreateRefNormalizationInt16NhwcWorkload)
{
    RefCreateNormalizationWorkloadTest<RefNormalizationWorkload, armnn::DataType::QuantisedSymm16>(DataLayout::NHWC);
}

template <typename Pooling2dWorkloadType, armnn::DataType DataType>
static void RefCreatePooling2dWorkloadTest(DataLayout dataLayout)
{
    Graph graph;
    RefWorkloadFactory factory;
    auto workload = CreatePooling2dWorkloadTest<Pooling2dWorkloadType, DataType>(factory, graph, dataLayout);

    TensorShape inputShape;
    TensorShape outputShape;

    switch (dataLayout)
    {
        case DataLayout::NHWC:
            inputShape  = { 3, 5, 5, 2 };
            outputShape = { 3, 2, 4, 2 };
            break;
        case DataLayout::NCHW:
        default:
            inputShape  = { 3, 2, 5, 5 };
            outputShape = { 3, 2, 2, 4 };
    }

    // Checks that outputs and inputs are as we expect them (see definition of CreatePooling2dWorkloadTest).
    CheckInputOutput(std::move(workload),
                     TensorInfo(inputShape, DataType),
                     TensorInfo(outputShape, DataType));
}

BOOST_AUTO_TEST_CASE(CreatePooling2dFloat32Workload)
{
    RefCreatePooling2dWorkloadTest<RefPooling2dWorkload, armnn::DataType::Float32>(DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(CreatePooling2dFloat32NhwcWorkload)
{
    RefCreatePooling2dWorkloadTest<RefPooling2dWorkload, armnn::DataType::Float32>(DataLayout::NHWC);
}

BOOST_AUTO_TEST_CASE(CreatePooling2dUint8Workload)
{
    RefCreatePooling2dWorkloadTest<RefPooling2dWorkload, armnn::DataType::QuantisedAsymm8>(DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(CreatePooling2dUint8NhwcWorkload)
{
    RefCreatePooling2dWorkloadTest<RefPooling2dWorkload, armnn::DataType::QuantisedAsymm8>(DataLayout::NHWC);
}

BOOST_AUTO_TEST_CASE(CreatePooling2dInt16Workload)
{
    RefCreatePooling2dWorkloadTest<RefPooling2dWorkload, armnn::DataType::QuantisedSymm16>(DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(CreatePooling2dInt16NhwcWorkload)
{
    RefCreatePooling2dWorkloadTest<RefPooling2dWorkload, armnn::DataType::QuantisedSymm16>(DataLayout::NHWC);
}

template <typename SoftmaxWorkloadType, armnn::DataType DataType>
static void RefCreateSoftmaxWorkloadTest()
{
    Graph graph;
    RefWorkloadFactory factory;
    auto workload = CreateSoftmaxWorkloadTest<SoftmaxWorkloadType, DataType>(factory, graph);

    // Checks that outputs and inputs are as we expect them (see definition of CreateSoftmaxWorkloadTest).
    CheckInputOutput(
        std::move(workload),
        TensorInfo({4, 1}, DataType),
        TensorInfo({4, 1}, DataType));
}

BOOST_AUTO_TEST_CASE(CreateSoftmaxFloat32Workload)
{
    RefCreateSoftmaxWorkloadTest<RefSoftmaxWorkload, armnn::DataType::Float32>();
}

BOOST_AUTO_TEST_CASE(CreateSoftmaxQuantisedAsymm8Workload)
{
    RefCreateSoftmaxWorkloadTest<RefSoftmaxWorkload, armnn::DataType::QuantisedAsymm8>();
}

BOOST_AUTO_TEST_CASE(CreateSoftmaxQuantisedSymm16Workload)
{
    RefCreateSoftmaxWorkloadTest<RefSoftmaxWorkload, armnn::DataType::QuantisedSymm16>();
}

template <typename SplitterWorkloadType, armnn::DataType DataType>
static void RefCreateSplitterWorkloadTest()
{
    Graph graph;
    RefWorkloadFactory factory;
    auto workload = CreateSplitterWorkloadTest<SplitterWorkloadType, DataType>(factory, graph);

    // Checks that outputs are as we expect them (see definition of CreateSplitterWorkloadTest).
    SplitterQueueDescriptor queueDescriptor = workload->GetData();
    auto inputHandle = boost::polymorphic_downcast<RefTensorHandle*>(queueDescriptor.m_Inputs[0]);
    BOOST_TEST((inputHandle->GetTensorInfo() == TensorInfo({ 5, 7, 7 }, DataType)));

    auto outputHandle0 = boost::polymorphic_downcast<RefTensorHandle*>(queueDescriptor.m_Outputs[0]);
    BOOST_TEST((outputHandle0->GetTensorInfo() == TensorInfo({ 1, 7, 7 }, DataType)));

    auto outputHandle1 = boost::polymorphic_downcast<RefTensorHandle*>(queueDescriptor.m_Outputs[1]);
    BOOST_TEST((outputHandle1->GetTensorInfo() == TensorInfo({ 2, 7, 7 }, DataType)));

    auto outputHandle2 = boost::polymorphic_downcast<RefTensorHandle*>(queueDescriptor.m_Outputs[2]);
    BOOST_TEST((outputHandle2->GetTensorInfo() == TensorInfo({ 2, 7, 7 }, DataType)));
}

BOOST_AUTO_TEST_CASE(CreateSplitterFloat32Workload)
{
    RefCreateSplitterWorkloadTest<RefSplitterWorkload, armnn::DataType::Float32>();
}

BOOST_AUTO_TEST_CASE(CreateSplitterUint8Workload)
{
    RefCreateSplitterWorkloadTest<RefSplitterWorkload, armnn::DataType::QuantisedAsymm8>();
}

template <typename SplitterWorkloadType, typename ConcatWorkloadType, armnn::DataType DataType>
static void RefCreateSplitterConcatWorkloadTest()
{
    // Tests that it is possible to decide which output of the splitter layer
    // should be linked to which input of the concat layer.
    // We test that it is possible to specify the 0th output of the splitter to be the 1st input to the
    // concat, and the 1st output of the splitter to be the 0th input of the concat.

    Graph graph;
    RefWorkloadFactory factory;
    auto workloads = CreateSplitterConcatWorkloadTest<SplitterWorkloadType, ConcatWorkloadType, DataType>
        (factory, graph);

    auto wlSplitter = std::move(workloads.first);
    auto wlConcat   = std::move(workloads.second);

    // Checks that the index of inputs/outputs matches what we declared on InputDescriptor construction.
    armnn::RefTensorHandle* sOut0 = dynamic_cast<armnn::RefTensorHandle*>(wlSplitter->GetData().m_Outputs[0]);
    armnn::RefTensorHandle* sOut1 = dynamic_cast<armnn::RefTensorHandle*>(wlSplitter->GetData().m_Outputs[1]);
    armnn::RefTensorHandle* mIn0  = dynamic_cast<armnn::RefTensorHandle*>(wlConcat->GetData().m_Inputs[0]);
    armnn::RefTensorHandle* mIn1  = dynamic_cast<armnn::RefTensorHandle*>(wlConcat->GetData().m_Inputs[1]);

    BOOST_TEST(sOut0);
    BOOST_TEST(sOut1);
    BOOST_TEST(mIn0);
    BOOST_TEST(mIn1);

    bool validDataPointers = (sOut0 == mIn1) && (sOut1 == mIn0);

    BOOST_TEST(validDataPointers);
}

BOOST_AUTO_TEST_CASE(CreateSplitterConcatFloat32)
{
    RefCreateSplitterConcatWorkloadTest<RefSplitterWorkload, RefConcatWorkload, DataType::Float32>();
}

BOOST_AUTO_TEST_CASE(CreateSplitterConcatUint8)
{
    RefCreateSplitterConcatWorkloadTest<RefSplitterWorkload, RefConcatWorkload, DataType::QuantisedAsymm8>();
}

template <typename SplitterWorkloadType, typename ActivationWorkloadType, armnn::DataType DataType>
static void RefCreateSingleOutputMultipleInputsTest()
{
    // Tests that it is possible to assign multiple (two) different layers to each of the outputs of a splitter layer.
    // We create a splitter with two outputs, and each of those outputs is used by two different activation layers.

    Graph graph;
    RefWorkloadFactory factory;
    std::unique_ptr<SplitterWorkloadType> wlSplitter;
    std::unique_ptr<ActivationWorkloadType> wlActiv0_0;
    std::unique_ptr<ActivationWorkloadType> wlActiv0_1;
    std::unique_ptr<ActivationWorkloadType> wlActiv1_0;
    std::unique_ptr<ActivationWorkloadType> wlActiv1_1;

    CreateSplitterMultipleInputsOneOutputWorkloadTest<SplitterWorkloadType,
        ActivationWorkloadType, DataType>(factory, graph, wlSplitter, wlActiv0_0, wlActiv0_1, wlActiv1_0, wlActiv1_1);

    armnn::RefTensorHandle* sOut0 = dynamic_cast<armnn::RefTensorHandle*>(wlSplitter->GetData().m_Outputs[0]);
    armnn::RefTensorHandle* sOut1 = dynamic_cast<armnn::RefTensorHandle*>(wlSplitter->GetData().m_Outputs[1]);
    armnn::RefTensorHandle* activ0_0Im = dynamic_cast<armnn::RefTensorHandle*>(wlActiv0_0->GetData().m_Inputs[0]);
    armnn::RefTensorHandle* activ0_1Im = dynamic_cast<armnn::RefTensorHandle*>(wlActiv0_1->GetData().m_Inputs[0]);
    armnn::RefTensorHandle* activ1_0Im = dynamic_cast<armnn::RefTensorHandle*>(wlActiv1_0->GetData().m_Inputs[0]);
    armnn::RefTensorHandle* activ1_1Im = dynamic_cast<armnn::RefTensorHandle*>(wlActiv1_1->GetData().m_Inputs[0]);

    BOOST_TEST(sOut0);
    BOOST_TEST(sOut1);
    BOOST_TEST(activ0_0Im);
    BOOST_TEST(activ0_1Im);
    BOOST_TEST(activ1_0Im);
    BOOST_TEST(activ1_1Im);

    bool validDataPointers = (sOut0 == activ0_0Im) && (sOut0 == activ0_1Im) &&
                             (sOut1 == activ1_0Im) && (sOut1 == activ1_1Im);

    BOOST_TEST(validDataPointers);
}

BOOST_AUTO_TEST_CASE(CreateSingleOutputMultipleInputsFloat32)
{
    RefCreateSingleOutputMultipleInputsTest<RefSplitterWorkload, RefActivationWorkload,
                                            armnn::DataType::Float32>();
}

BOOST_AUTO_TEST_CASE(CreateSingleOutputMultipleInputsUint8)
{
    RefCreateSingleOutputMultipleInputsTest<RefSplitterWorkload, RefActivationWorkload,
                                            armnn::DataType::QuantisedAsymm8>();
}

template <typename ResizeBilinearWorkloadType, armnn::DataType DataType>
static void RefCreateResizeBilinearTest(DataLayout dataLayout)
{
    Graph graph;
    RefWorkloadFactory factory;
    auto workload = CreateResizeBilinearWorkloadTest<ResizeBilinearWorkloadType, DataType>(factory, graph, dataLayout);

    TensorShape inputShape;
    TensorShape outputShape;

    switch (dataLayout)
    {
        case DataLayout::NHWC:
            inputShape  = { 2, 4, 4, 3 };
            outputShape = { 2, 2, 2, 3 };
            break;
        case DataLayout::NCHW:
        default:
            inputShape  = { 2, 3, 4, 4 };
            outputShape = { 2, 3, 2, 2 };
    }

    // Checks that outputs and inputs are as we expect them (see definition of CreateResizeBilinearWorkloadTest).
    CheckInputOutput(std::move(workload),
                     TensorInfo(inputShape, DataType),
                     TensorInfo(outputShape, DataType));
}

BOOST_AUTO_TEST_CASE(CreateResizeBilinearFloat32)
{
    RefCreateResizeBilinearTest<RefResizeBilinearWorkload, armnn::DataType::Float32>(DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(CreateResizeBilinearUint8)
{
    RefCreateResizeBilinearTest<RefResizeBilinearWorkload, armnn::DataType::QuantisedAsymm8>(DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(CreateResizeBilinearQuantisedSymm16)
{
    RefCreateResizeBilinearTest<RefResizeBilinearWorkload, armnn::DataType::QuantisedSymm16>(DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(CreateResizeBilinearFloat32Nhwc)
{
    RefCreateResizeBilinearTest<RefResizeBilinearWorkload, armnn::DataType::Float32>(DataLayout::NHWC);
}

template <typename RsqrtWorkloadType, armnn::DataType DataType>
static void RefCreateRsqrtTest()
{
    Graph graph;
    RefWorkloadFactory factory;

    auto workload = CreateRsqrtWorkloadTest<RsqrtWorkloadType, DataType>(factory, graph);

    // Checks that outputs are as we expect them (see definition of CreateRsqrtWorkloadTest).
    CheckInputOutput(std::move(workload),
                     TensorInfo({ 1, 1 }, DataType),
                     TensorInfo({ 1, 1 }, DataType));
}

BOOST_AUTO_TEST_CASE(CreateRsqrtFloat32)
{
    RefCreateRsqrtTest<RefRsqrtWorkload, armnn::DataType::Float32>();
}

BOOST_AUTO_TEST_CASE(CreateRsqrtUint8)
{
    RefCreateRsqrtTest<RefRsqrtWorkload, armnn::DataType::QuantisedAsymm8>();
}

BOOST_AUTO_TEST_CASE(CreateRsqrtQsymm16)
{
    RefCreateRsqrtTest<RefRsqrtWorkload, armnn::DataType::QuantisedSymm16>();
}

template <typename BatchToSpaceNdWorkloadType, armnn::DataType DataType>
static void RefCreateBatchToSpaceNdTest()
{
    Graph graph;
    RefWorkloadFactory factory;

    auto workload = CreateBatchToSpaceNdWorkloadTest<BatchToSpaceNdWorkloadType, DataType>(factory, graph);

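    // Checks that outputs and inputs are as we expect them (see definition of CreateBatchToSpaceNdWorkloadTest).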
    CheckInputOutput(std::move(workload),
                     TensorInfo({ 1, 1, 1, 1 }, DataType),
                     TensorInfo({ 1, 1, 1, 1 }, DataType));
}

BOOST_AUTO_TEST_CASE(CreateBatchToSpaceNdFloat32)
{
    RefCreateBatchToSpaceNdTest<RefBatchToSpaceNdWorkload, armnn::DataType::Float32>();
}

BOOST_AUTO_TEST_CASE(CreateBatchToSpaceNdUint8)
{
    RefCreateBatchToSpaceNdTest<RefBatchToSpaceNdWorkload, armnn::DataType::QuantisedAsymm8>();
}

BOOST_AUTO_TEST_CASE(CreateBatchToSpaceNdQSymm16)
{
    RefCreateBatchToSpaceNdTest<RefBatchToSpaceNdWorkload, armnn::DataType::QuantisedSymm16>();
}

template <typename L2NormalizationWorkloadType, armnn::DataType DataType>
static void RefCreateL2NormalizationTest(DataLayout dataLayout)
{
    Graph graph;
    RefWorkloadFactory factory;
    auto workload =
        CreateL2NormalizationWorkloadTest<L2NormalizationWorkloadType, DataType>(factory, graph, dataLayout);

    TensorShape inputShape;
    TensorShape outputShape;

    switch (dataLayout)
    {
        case DataLayout::NHWC:
            inputShape  = { 5, 50, 67, 20 };
            outputShape = { 5, 50, 67, 20 };
            break;
        case DataLayout::NCHW:
        default:
            inputShape  = { 5, 20, 50, 67 };
            outputShape = { 5, 20, 50, 67 };
            break;
    }

    // Checks that outputs and inputs are as we expect them (see definition of CreateL2NormalizationWorkloadTest).
    CheckInputOutput(std::move(workload), TensorInfo(inputShape, DataType), TensorInfo(outputShape, DataType));
}

BOOST_AUTO_TEST_CASE(CreateL2NormalizationFloat32)
{
    RefCreateL2NormalizationTest<RefL2NormalizationWorkload, armnn::DataType::Float32>(DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(CreateL2NormalizationFloat32Nhwc)
{
    RefCreateL2NormalizationTest<RefL2NormalizationWorkload, armnn::DataType::Float32>(DataLayout::NHWC);
}

BOOST_AUTO_TEST_CASE(CreateL2NormalizationInt16)
{
    RefCreateL2NormalizationTest<RefL2NormalizationWorkload, armnn::DataType::QuantisedSymm16>(DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(CreateL2NormalizationInt16Nhwc)
{
    RefCreateL2NormalizationTest<RefL2NormalizationWorkload, armnn::DataType::QuantisedSymm16>(DataLayout::NHWC);
}

BOOST_AUTO_TEST_CASE(CreateL2NormalizationUint8)
{
    RefCreateL2NormalizationTest<RefL2NormalizationWorkload, armnn::DataType::QuantisedAsymm8>(DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(CreateL2NormalizationUint8Nhwc)
{
    RefCreateL2NormalizationTest<RefL2NormalizationWorkload, armnn::DataType::QuantisedAsymm8>(DataLayout::NHWC);
}

template <typename ReshapeWorkloadType, armnn::DataType DataType>
static void RefCreateReshapeWorkloadTest()
{
    Graph graph;
    RefWorkloadFactory factory;
    auto workload = CreateReshapeWorkloadTest<ReshapeWorkloadType, DataType>(factory, graph);

    // Checks that outputs and inputs are as we expect them (see definition of CreateReshapeWorkloadTest).
    CheckInputOutput(
        std::move(workload),
        TensorInfo({ 4, 1 }, DataType),
        TensorInfo({ 1, 4 }, DataType));
}

BOOST_AUTO_TEST_CASE(CreateReshapeWorkloadFloat32)
{
    RefCreateReshapeWorkloadTest<RefReshapeWorkload, armnn::DataType::Float32>();
}

BOOST_AUTO_TEST_CASE(CreateReshapeWorkloadQuantisedAsymm8)
{
    RefCreateReshapeWorkloadTest<RefReshapeWorkload, armnn::DataType::QuantisedAsymm8>();
}

BOOST_AUTO_TEST_CASE(CreateReshapeWorkloadQuantisedSymm16)
{
    RefCreateReshapeWorkloadTest<RefReshapeWorkload, armnn::DataType::QuantisedSymm16>();
}

template <typename ConcatWorkloadType, armnn::DataType DataType>
static void RefCreateConcatWorkloadTest(const armnn::TensorShape& outputShape,
                                        unsigned int concatAxis)
{
    Graph graph;
    RefWorkloadFactory factory;
    auto workload = CreateConcatWorkloadTest<ConcatWorkloadType, DataType>(factory, graph, outputShape, concatAxis);

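    // Checks that inputs/outputs are as we expect them (see definition of CreateConcatWorkloadTest).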
    CheckInputsOutput(std::move(workload),
                      TensorInfo({ 2, 3, 2, 5 }, DataType),
                      TensorInfo({ 2, 3, 2, 5 }, DataType),
                      TensorInfo(outputShape, DataType));
}

BOOST_AUTO_TEST_CASE(CreateConcatDim0Float32Workload)
{
    RefCreateConcatWorkloadTest<RefConcatWorkload, armnn::DataType::Float32>({ 4, 3, 2, 5 }, 0);
}

BOOST_AUTO_TEST_CASE(CreateConcatDim0Uint8Workload)
{
    RefCreateConcatWorkloadTest<RefConcatWorkload, armnn::DataType::QuantisedAsymm8>({ 4, 3, 2, 5 }, 0);
}

BOOST_AUTO_TEST_CASE(CreateConcatDim0Uint16Workload)
{
    RefCreateConcatWorkloadTest<RefConcatWorkload, armnn::DataType::QuantisedSymm16>({ 4, 3, 2, 5 }, 0);
}

BOOST_AUTO_TEST_CASE(CreateConcatDim1Float32Workload)
{
    RefCreateConcatWorkloadTest<RefConcatWorkload, armnn::DataType::Float32>({ 2, 6, 2, 5 }, 1);
}

BOOST_AUTO_TEST_CASE(CreateConcatDim1Uint8Workload)
{
    RefCreateConcatWorkloadTest<RefConcatWorkload, armnn::DataType::QuantisedAsymm8>({ 2, 6, 2, 5 }, 1);
}

BOOST_AUTO_TEST_CASE(CreateConcatDim2Float32Workload)
{
    RefCreateConcatWorkloadTest<RefConcatWorkload, armnn::DataType::Float32>({ 2, 3, 4, 5 }, 2);
}

BOOST_AUTO_TEST_CASE(CreateConcatDim2Uint8Workload)
{
    RefCreateConcatWorkloadTest<RefConcatWorkload, armnn::DataType::QuantisedAsymm8>({ 2, 3, 4, 5 }, 2);
}

BOOST_AUTO_TEST_CASE(CreateConcatDim3Float32Workload)
{
    RefCreateConcatWorkloadTest<RefConcatWorkload, armnn::DataType::Float32>({ 2, 3, 2, 10 }, 3);
}

BOOST_AUTO_TEST_CASE(CreateConcatDim3Uint8Workload)
{
    RefCreateConcatWorkloadTest<RefConcatWorkload, armnn::DataType::QuantisedAsymm8>({ 2, 3, 2, 10 }, 3);
}

template <typename ConstantWorkloadType, armnn::DataType DataType>
static void RefCreateConstantWorkloadTest(const armnn::TensorShape& outputShape)
{
    armnn::Graph graph;
    RefWorkloadFactory factory;
    auto workload = CreateConstantWorkloadTest<ConstantWorkloadType, DataType>(factory, graph, outputShape);

    // Check output is as expected
    auto queueDescriptor = workload->GetData();
    auto outputHandle = boost::polymorphic_downcast<RefTensorHandle*>(queueDescriptor.m_Outputs[0]);
    BOOST_TEST((outputHandle->GetTensorInfo() == TensorInfo(outputShape, DataType)));
}

BOOST_AUTO_TEST_CASE(CreateConstantUint8Workload)
{
    RefCreateConstantWorkloadTest<RefConstantWorkload, armnn::DataType::QuantisedAsymm8>({ 2, 3, 2, 10 });
}

BOOST_AUTO_TEST_CASE(CreateConstantInt16Workload)
{
    RefCreateConstantWorkloadTest<RefConstantWorkload, armnn::DataType::QuantisedSymm16>({ 2, 3, 2, 10 });
}

BOOST_AUTO_TEST_CASE(CreateConstantFloat32Workload)
{
    RefCreateConstantWorkloadTest<RefConstantWorkload, armnn::DataType::Float32>({ 2, 3, 2, 10 });
}

BOOST_AUTO_TEST_CASE(CreateConstantSigned32Workload)
{
    RefCreateConstantWorkloadTest<RefConstantWorkload, armnn::DataType::Signed32>({ 2, 3, 2, 10 });
}

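// The "NoBroadcast" cases below expect CreatePreluWorkloadTest to throw InvalidArgumentException when the
// alpha shape cannot be broadcast to the output shape.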
static void RefCreatePreluWorkloadTest(const armnn::TensorShape& inputShape,
                                       const armnn::TensorShape& alphaShape,
                                       const armnn::TensorShape& outputShape,
                                       armnn::DataType dataType)
{
    armnn::Graph graph;
    RefWorkloadFactory factory;
    auto workload = CreatePreluWorkloadTest<RefPreluWorkload>(factory,
                                                              graph,
                                                              inputShape,
                                                              alphaShape,
                                                              outputShape,
                                                              dataType);

    // Check output is as expected
    auto queueDescriptor = workload->GetData();
    auto outputHandle = boost::polymorphic_downcast<RefTensorHandle*>(queueDescriptor.m_Outputs[0]);
    BOOST_TEST((outputHandle->GetTensorInfo() == TensorInfo(outputShape, dataType)));
}

BOOST_AUTO_TEST_CASE(CreatePreluFloat32Workload)
{
    RefCreatePreluWorkloadTest({ 1, 4, 1, 2 }, { 5, 4, 3, 1 }, { 5, 4, 3, 2 }, armnn::DataType::Float32);
}

BOOST_AUTO_TEST_CASE(CreatePreluUint8Workload)
{
    RefCreatePreluWorkloadTest({ 1, 4, 1, 2 }, { 5, 4, 3, 1 }, { 5, 4, 3, 2 }, armnn::DataType::QuantisedAsymm8);
}

BOOST_AUTO_TEST_CASE(CreatePreluInt16Workload)
{
    RefCreatePreluWorkloadTest({ 1, 4, 1, 2 }, { 5, 4, 3, 1 }, { 5, 4, 3, 2 }, armnn::DataType::QuantisedSymm16);
}

BOOST_AUTO_TEST_CASE(CreatePreluFloat32NoBroadcastWorkload)
{
    BOOST_CHECK_THROW(RefCreatePreluWorkloadTest({ 1, 4, 7, 2 }, { 5, 4, 3, 1 }, { 5, 4, 3, 2 },
                                                 armnn::DataType::Float32),
                      armnn::InvalidArgumentException);
}

BOOST_AUTO_TEST_CASE(CreatePreluUint8NoBroadcastWorkload)
{
    BOOST_CHECK_THROW(RefCreatePreluWorkloadTest({ 1, 4, 7, 2 }, { 5, 4, 3, 1 }, { 5, 4, 3, 2 },
                                                 armnn::DataType::QuantisedAsymm8),
                      armnn::InvalidArgumentException);
}

BOOST_AUTO_TEST_CASE(CreatePreluInt16NoBroadcastWorkload)
{
    BOOST_CHECK_THROW(RefCreatePreluWorkloadTest({ 1, 4, 7, 2 }, { 5, 4, 3, 1 }, { 5, 4, 3, 2 },
                                                 armnn::DataType::QuantisedSymm16),
                      armnn::InvalidArgumentException);
}

BOOST_AUTO_TEST_SUITE_END()