//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include <test/CreateWorkload.hpp>

#include <backendsCommon/CpuTensorHandle.hpp>
#include <reference/RefWorkloadFactory.hpp>
#include <reference/workloads/RefWorkloads.hpp>

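// Unit tests for the reference backend: each test builds a small graph with the
// helpers from test/CreateWorkload.hpp, asks RefWorkloadFactory to create the
// corresponding workload, and then checks the tensor infos the workload was given.
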
namespace
{

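// Checks that a single-input, single-output workload was handed the expected
// input and output tensor infos.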
template<typename Workload>
void CheckInputOutput(std::unique_ptr<Workload> workload, const TensorInfo& inputInfo, const TensorInfo& outputInfo)
{
    auto queueDescriptor = workload->GetData();
    auto inputHandle  = boost::polymorphic_downcast<ConstCpuTensorHandle*>(queueDescriptor.m_Inputs[0]);
    auto outputHandle = boost::polymorphic_downcast<CpuTensorHandle*>(queueDescriptor.m_Outputs[0]);
    BOOST_TEST((inputHandle->GetTensorInfo() == inputInfo));
    BOOST_TEST((outputHandle->GetTensorInfo() == outputInfo));
}

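// Two-input variant of the check above, used by the elementwise and concat tests.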
template <typename Workload>
void CheckInputsOutput(std::unique_ptr<Workload> workload,
                       const TensorInfo&         inputInfo0,
                       const TensorInfo&         inputInfo1,
                       const TensorInfo&         outputInfo)
{
    auto queueDescriptor = workload->GetData();
    auto inputHandle0 = boost::polymorphic_downcast<ConstCpuTensorHandle*>(queueDescriptor.m_Inputs[0]);
    auto inputHandle1 = boost::polymorphic_downcast<ConstCpuTensorHandle*>(queueDescriptor.m_Inputs[1]);
    auto outputHandle = boost::polymorphic_downcast<CpuTensorHandle*>(queueDescriptor.m_Outputs[0]);
    BOOST_TEST((inputHandle0->GetTensorInfo() == inputInfo0));
    BOOST_TEST((inputHandle1->GetTensorInfo() == inputInfo1));
    BOOST_TEST((outputHandle->GetTensorInfo() == outputInfo));
}
}

BOOST_AUTO_TEST_SUITE(CreateWorkloadRef)

template <typename ActivationWorkloadType, armnn::DataType DataType>
static void RefCreateActivationWorkloadTest()
{
    Graph graph;
    RefWorkloadFactory factory;
    auto workload = CreateActivationWorkloadTest<ActivationWorkloadType, DataType>(factory, graph);

    // Checks that inputs and outputs are as we expect them (see definition of CreateActivationWorkloadTest).
    CheckInputOutput(std::move(workload),
                     TensorInfo({ 1, 1 }, DataType),
                     TensorInfo({ 1, 1 }, DataType));
}

BOOST_AUTO_TEST_CASE(CreateActivationFloat32Workload)
{
    RefCreateActivationWorkloadTest<RefActivationWorkload, armnn::DataType::Float32>();
}

BOOST_AUTO_TEST_CASE(CreateActivationUint8Workload)
{
    RefCreateActivationWorkloadTest<RefActivationWorkload, armnn::DataType::QuantisedAsymm8>();
}

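// Shared helper for the elementwise workloads (Addition, Subtraction, Multiplication
// and Division): all of them use { 2, 3 } input and output tensors.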
template <typename WorkloadType,
          typename DescriptorType,
          typename LayerType,
          armnn::DataType DataType>
static void RefCreateElementwiseWorkloadTest()
{
    Graph graph;
    RefWorkloadFactory factory;
    auto workload = CreateElementwiseWorkloadTest<WorkloadType, DescriptorType, LayerType, DataType>(
        factory, graph);

    CheckInputsOutput(std::move(workload),
                      TensorInfo({ 2, 3 }, DataType),
                      TensorInfo({ 2, 3 }, DataType),
                      TensorInfo({ 2, 3 }, DataType));
}

BOOST_AUTO_TEST_CASE(CreateAdditionFloatWorkload)
{
    RefCreateElementwiseWorkloadTest<RefAdditionWorkload,
                                     AdditionQueueDescriptor,
                                     AdditionLayer,
                                     armnn::DataType::Float32>();
}

BOOST_AUTO_TEST_CASE(CreateAdditionUint8Workload)
{
    RefCreateElementwiseWorkloadTest<RefAdditionWorkload,
                                     AdditionQueueDescriptor,
                                     AdditionLayer,
                                     armnn::DataType::QuantisedAsymm8>();
}

BOOST_AUTO_TEST_CASE(CreateAdditionInt16Workload)
{
    RefCreateElementwiseWorkloadTest<RefAdditionWorkload,
                                     AdditionQueueDescriptor,
                                     AdditionLayer,
                                     armnn::DataType::QuantisedSymm16>();
}

BOOST_AUTO_TEST_CASE(CreateSubtractionFloatWorkload)
{
    RefCreateElementwiseWorkloadTest<RefSubtractionWorkload,
                                     SubtractionQueueDescriptor,
                                     SubtractionLayer,
                                     armnn::DataType::Float32>();
}

BOOST_AUTO_TEST_CASE(CreateSubtractionUint8Workload)
{
    RefCreateElementwiseWorkloadTest<RefSubtractionWorkload,
                                     SubtractionQueueDescriptor,
                                     SubtractionLayer,
                                     armnn::DataType::QuantisedAsymm8>();
}

BOOST_AUTO_TEST_CASE(CreateSubtractionInt16Workload)
{
    RefCreateElementwiseWorkloadTest<RefSubtractionWorkload,
                                     SubtractionQueueDescriptor,
                                     SubtractionLayer,
                                     armnn::DataType::QuantisedSymm16>();
}

BOOST_AUTO_TEST_CASE(CreateMultiplicationFloatWorkload)
{
    RefCreateElementwiseWorkloadTest<RefMultiplicationWorkload,
                                     MultiplicationQueueDescriptor,
                                     MultiplicationLayer,
                                     armnn::DataType::Float32>();
}

BOOST_AUTO_TEST_CASE(CreateMultiplicationUint8Workload)
{
    RefCreateElementwiseWorkloadTest<RefMultiplicationWorkload,
                                     MultiplicationQueueDescriptor,
                                     MultiplicationLayer,
                                     armnn::DataType::QuantisedAsymm8>();
}

BOOST_AUTO_TEST_CASE(CreateMultiplicationInt16Workload)
{
    RefCreateElementwiseWorkloadTest<RefMultiplicationWorkload,
                                     MultiplicationQueueDescriptor,
                                     MultiplicationLayer,
                                     armnn::DataType::QuantisedSymm16>();
}

BOOST_AUTO_TEST_CASE(CreateDivisionFloatWorkload)
{
    RefCreateElementwiseWorkloadTest<RefDivisionWorkload,
                                     DivisionQueueDescriptor,
                                     DivisionLayer,
                                     armnn::DataType::Float32>();
}

BOOST_AUTO_TEST_CASE(CreateDivisionUint8Workload)
{
    RefCreateElementwiseWorkloadTest<RefDivisionWorkload,
                                     DivisionQueueDescriptor,
                                     DivisionLayer,
                                     armnn::DataType::QuantisedAsymm8>();
}

BOOST_AUTO_TEST_CASE(CreateDivisionInt16Workload)
{
    RefCreateElementwiseWorkloadTest<RefDivisionWorkload,
                                     DivisionQueueDescriptor,
                                     DivisionLayer,
                                     armnn::DataType::QuantisedSymm16>();
}

template <typename BatchNormalizationWorkloadType, armnn::DataType DataType>
static void RefCreateBatchNormalizationWorkloadTest(DataLayout dataLayout)
{
    Graph graph;
    RefWorkloadFactory factory;
    auto workload = CreateBatchNormalizationWorkloadTest<BatchNormalizationWorkloadType, DataType>(factory,
                                                                                                    graph,
                                                                                                    dataLayout);

    TensorShape inputShape;
    TensorShape outputShape;

    switch (dataLayout)
    {
        case DataLayout::NHWC:
            inputShape  = { 2, 4, 4, 3 };
            outputShape = { 2, 4, 4, 3 };
            break;
        case DataLayout::NCHW:
        default:
            inputShape  = { 2, 3, 4, 4 };
            outputShape = { 2, 3, 4, 4 };
            break;
    }

    // Checks that outputs and inputs are as we expect them (see definition of CreateBatchNormalizationWorkloadTest).
    CheckInputOutput(std::move(workload), TensorInfo(inputShape, DataType), TensorInfo(outputShape, DataType));
}

BOOST_AUTO_TEST_CASE(CreateBatchNormalizationFloat32Workload)
{
    RefCreateBatchNormalizationWorkloadTest<RefBatchNormalizationWorkload, armnn::DataType::Float32>
        (DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(CreateBatchNormalizationFloat32WorkloadNhwc)
{
    RefCreateBatchNormalizationWorkloadTest<RefBatchNormalizationWorkload, armnn::DataType::Float32>
        (DataLayout::NHWC);
}

BOOST_AUTO_TEST_CASE(CreateBatchNormalizationUint8Workload)
{
    RefCreateBatchNormalizationWorkloadTest<RefBatchNormalizationWorkload, armnn::DataType::QuantisedAsymm8>
        (DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(CreateBatchNormalizationUint8WorkloadNhwc)
{
    RefCreateBatchNormalizationWorkloadTest<RefBatchNormalizationWorkload, armnn::DataType::QuantisedAsymm8>
        (DataLayout::NHWC);
}

BOOST_AUTO_TEST_CASE(CreateConvertFp16ToFp32Float32Workload)
{
    Graph graph;
    RefWorkloadFactory factory;
    auto workload = CreateConvertFp16ToFp32WorkloadTest<RefConvertFp16ToFp32Workload>(factory, graph);

    // Checks that outputs and inputs are as we expect them.
    CheckInputOutput(
        std::move(workload), TensorInfo({1, 3, 2, 3}, DataType::Float16), TensorInfo({1, 3, 2, 3}, DataType::Float32));
}

BOOST_AUTO_TEST_CASE(CreateConvertFp32ToFp16Float16Workload)
{
    Graph graph;
    RefWorkloadFactory factory;
    auto workload = CreateConvertFp32ToFp16WorkloadTest<RefConvertFp32ToFp16Workload>(factory, graph);

    // Checks that outputs and inputs are as we expect them.
    CheckInputOutput(
        std::move(workload), TensorInfo({1, 3, 2, 3}, DataType::Float32), TensorInfo({1, 3, 2, 3}, DataType::Float16));
}

static void RefCreateConvolution2dWorkloadTest(DataLayout dataLayout = DataLayout::NCHW)
{
    Graph graph;
    RefWorkloadFactory factory;
    auto workload = CreateConvolution2dWorkloadTest<RefConvolution2dWorkload, DataType::Float32>
                    (factory, graph, dataLayout);

    std::initializer_list<unsigned int> inputShape  = (dataLayout == DataLayout::NCHW) ?
        std::initializer_list<unsigned int>({2, 3, 8, 16}) : std::initializer_list<unsigned int>({2, 8, 16, 3});
    std::initializer_list<unsigned int> outputShape = (dataLayout == DataLayout::NCHW) ?
        std::initializer_list<unsigned int>({2, 2, 2, 10}) : std::initializer_list<unsigned int>({2, 2, 10, 2});

    // Checks that outputs and inputs are as we expect them (see definition of CreateConvolution2dWorkloadTest).
    CheckInputOutput(std::move(workload),
                     TensorInfo(inputShape, DataType::Float32),
                     TensorInfo(outputShape, DataType::Float32));
}

BOOST_AUTO_TEST_CASE(CreateConvolution2dFloatNchwWorkload)
{
    RefCreateConvolution2dWorkloadTest(DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(CreateConvolution2dFloatNhwcWorkload)
{
    RefCreateConvolution2dWorkloadTest(DataLayout::NHWC);
}

static void RefCreateDepthwiseConvolutionWorkloadTest(DataLayout dataLayout)
{
    Graph graph;
    RefWorkloadFactory factory;
    auto workload = CreateDepthwiseConvolution2dWorkloadTest<RefDepthwiseConvolution2dWorkload, DataType::Float32>
                    (factory, graph, dataLayout);

    std::initializer_list<unsigned int> inputShape  = (dataLayout == DataLayout::NCHW)
                                                      ? std::initializer_list<unsigned int>({ 2, 2, 5, 5 })
                                                      : std::initializer_list<unsigned int>({ 2, 5, 5, 2 });
    std::initializer_list<unsigned int> outputShape = (dataLayout == DataLayout::NCHW)
                                                      ? std::initializer_list<unsigned int>({ 2, 2, 5, 5 })
                                                      : std::initializer_list<unsigned int>({ 2, 5, 5, 2 });

    // Checks that inputs/outputs are as we expect them (see definition of CreateDepthwiseConvolution2dWorkloadTest).
    CheckInputOutput(std::move(workload),
                     TensorInfo(inputShape, DataType::Float32),
                     TensorInfo(outputShape, DataType::Float32));
}

BOOST_AUTO_TEST_CASE(CreateDepthwiseConvolutionFloat32NhwcWorkload)
{
    RefCreateDepthwiseConvolutionWorkloadTest(DataLayout::NHWC);
}

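// The quantised FullyConnected variants set non-zero quantisation scales on the
// input and output tensor infos.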
template <typename FullyConnectedWorkloadType, armnn::DataType DataType>
static void RefCreateFullyConnectedWorkloadTest()
{
    Graph graph;
    RefWorkloadFactory factory;
    auto workload = CreateFullyConnectedWorkloadTest<FullyConnectedWorkloadType, DataType>(factory, graph);

    // Checks that outputs and inputs are as we expect them (see definition of CreateFullyConnectedWorkloadTest).
    float inputsQScale = (DataType == armnn::DataType::QuantisedAsymm8) ? 1.0f : 0.0f;
    float outputQScale = (DataType == armnn::DataType::QuantisedAsymm8) ? 2.0f : 0.0f;
    CheckInputOutput(std::move(workload),
                     TensorInfo({ 3, 1, 4, 5 }, DataType, inputsQScale),
                     TensorInfo({ 3, 7 }, DataType, outputQScale));
}

BOOST_AUTO_TEST_CASE(CreateFullyConnectedWorkloadFloat32)
{
    RefCreateFullyConnectedWorkloadTest<RefFullyConnectedWorkload, armnn::DataType::Float32>();
}

BOOST_AUTO_TEST_CASE(CreateFullyConnectedWorkloadQuantisedAsymm8)
{
    RefCreateFullyConnectedWorkloadTest<RefFullyConnectedWorkload, armnn::DataType::QuantisedAsymm8>();
}

BOOST_AUTO_TEST_CASE(CreateFullyConnectedWorkloadQuantisedSymm16)
{
    RefCreateFullyConnectedWorkloadTest<RefFullyConnectedWorkload, armnn::DataType::QuantisedSymm16>();
}

template <typename NormalizationWorkloadType, armnn::DataType DataType>
static void RefCreateNormalizationWorkloadTest(DataLayout dataLayout)
{
    Graph graph;
    RefWorkloadFactory factory;
    auto workload = CreateNormalizationWorkloadTest<NormalizationWorkloadType, DataType>(factory, graph, dataLayout);

    TensorShape inputShape;
    TensorShape outputShape;

    switch (dataLayout)
    {
        case DataLayout::NHWC:
            inputShape  = { 3, 1, 5, 5 };
            outputShape = { 3, 1, 5, 5 };
            break;
        case DataLayout::NCHW:
        default:
            inputShape  = { 3, 5, 5, 1 };
            outputShape = { 3, 5, 5, 1 };
            break;
    }

    // Checks that outputs and inputs are as we expect them (see definition of CreateNormalizationWorkloadTest).
    CheckInputOutput(std::move(workload), TensorInfo(inputShape, DataType), TensorInfo(outputShape, DataType));
}

BOOST_AUTO_TEST_CASE(CreateRefNormalizationNchwWorkload)
{
    RefCreateNormalizationWorkloadTest<RefNormalizationFloat32Workload, armnn::DataType::Float32>(DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(CreateRefNormalizationNhwcWorkload)
{
    RefCreateNormalizationWorkloadTest<RefNormalizationFloat32Workload, armnn::DataType::Float32>(DataLayout::NHWC);
}

template <typename Pooling2dWorkloadType, armnn::DataType DataType>
static void RefCreatePooling2dWorkloadTest(DataLayout dataLayout)
{
    Graph graph;
    RefWorkloadFactory factory;
    auto workload = CreatePooling2dWorkloadTest<Pooling2dWorkloadType, DataType>(factory, graph, dataLayout);

    TensorShape inputShape;
    TensorShape outputShape;

    switch (dataLayout)
    {
        case DataLayout::NHWC:
            inputShape  = { 3, 5, 5, 2 };
            outputShape = { 3, 2, 4, 2 };
            break;
        case DataLayout::NCHW:
        default:
            inputShape  = { 3, 2, 5, 5 };
            outputShape = { 3, 2, 2, 4 };
    }

    // Checks that outputs and inputs are as we expect them (see definition of CreatePooling2dWorkloadTest).
    CheckInputOutput(std::move(workload),
                     TensorInfo(inputShape, DataType),
                     TensorInfo(outputShape, DataType));
}

BOOST_AUTO_TEST_CASE(CreatePooling2dFloat32Workload)
{
    RefCreatePooling2dWorkloadTest<RefPooling2dFloat32Workload, armnn::DataType::Float32>(DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(CreatePooling2dFloat32NhwcWorkload)
{
    RefCreatePooling2dWorkloadTest<RefPooling2dFloat32Workload, armnn::DataType::Float32>(DataLayout::NHWC);
}

BOOST_AUTO_TEST_CASE(CreatePooling2dUint8Workload)
{
    RefCreatePooling2dWorkloadTest<RefPooling2dUint8Workload, armnn::DataType::QuantisedAsymm8>(DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(CreatePooling2dUint8NhwcWorkload)
{
    RefCreatePooling2dWorkloadTest<RefPooling2dUint8Workload, armnn::DataType::QuantisedAsymm8>(DataLayout::NHWC);
}

template <typename SoftmaxWorkloadType, armnn::DataType DataType>
static void RefCreateSoftmaxWorkloadTest()
{
    Graph graph;
    RefWorkloadFactory factory;
    auto workload = CreateSoftmaxWorkloadTest<SoftmaxWorkloadType, DataType>(factory, graph);

    // Checks that outputs and inputs are as we expect them (see definition of CreateSoftmaxWorkloadTest).
    CheckInputOutput(
        std::move(workload),
        TensorInfo({4, 1}, DataType),
        TensorInfo({4, 1}, DataType));
}

BOOST_AUTO_TEST_CASE(CreateSoftmaxFloat32Workload)
{
    RefCreateSoftmaxWorkloadTest<RefSoftmaxWorkload, armnn::DataType::Float32>();
}

BOOST_AUTO_TEST_CASE(CreateSoftmaxQuantisedAsymm8Workload)
{
    RefCreateSoftmaxWorkloadTest<RefSoftmaxWorkload, armnn::DataType::QuantisedAsymm8>();
}

BOOST_AUTO_TEST_CASE(CreateSoftmaxQuantisedSymm16Workload)
{
    RefCreateSoftmaxWorkloadTest<RefSoftmaxWorkload, armnn::DataType::QuantisedSymm16>();
}

template <typename SplitterWorkloadType, armnn::DataType DataType>
static void RefCreateSplitterWorkloadTest()
{
    Graph graph;
    RefWorkloadFactory factory;
    auto workload = CreateSplitterWorkloadTest<SplitterWorkloadType, DataType>(factory, graph);

    // Checks that the input and outputs are as we expect them (see definition of CreateSplitterWorkloadTest).
    SplitterQueueDescriptor queueDescriptor = workload->GetData();
    auto inputHandle = boost::polymorphic_downcast<ConstCpuTensorHandle*>(queueDescriptor.m_Inputs[0]);
    BOOST_TEST((inputHandle->GetTensorInfo() == TensorInfo({ 5, 7, 7 }, DataType)));

    auto outputHandle0 = boost::polymorphic_downcast<CpuTensorHandle*>(queueDescriptor.m_Outputs[0]);
    BOOST_TEST((outputHandle0->GetTensorInfo() == TensorInfo({ 1, 7, 7 }, DataType)));

    auto outputHandle1 = boost::polymorphic_downcast<CpuTensorHandle*>(queueDescriptor.m_Outputs[1]);
    BOOST_TEST((outputHandle1->GetTensorInfo() == TensorInfo({ 2, 7, 7 }, DataType)));

    auto outputHandle2 = boost::polymorphic_downcast<CpuTensorHandle*>(queueDescriptor.m_Outputs[2]);
    BOOST_TEST((outputHandle2->GetTensorInfo() == TensorInfo({ 2, 7, 7 }, DataType)));
}

BOOST_AUTO_TEST_CASE(CreateSplitterFloat32Workload)
{
    RefCreateSplitterWorkloadTest<RefSplitterWorkload, armnn::DataType::Float32>();
}

BOOST_AUTO_TEST_CASE(CreateSplitterUint8Workload)
{
    RefCreateSplitterWorkloadTest<RefSplitterWorkload, armnn::DataType::QuantisedAsymm8>();
}

template <typename SplitterWorkloadType, typename ConcatWorkloadType, armnn::DataType DataType>
static void RefCreateSplitterConcatWorkloadTest()
{
    // Tests that it is possible to decide which output of the splitter layer
    // should be linked to which input of the concat layer.
    // We test that it is possible to specify the 0th output of the splitter to be the 1st input to the concat,
    // and the 1st output of the splitter to be the 0th input of the concat.

    Graph graph;
    RefWorkloadFactory factory;
    auto workloads = CreateSplitterConcatWorkloadTest<SplitterWorkloadType, ConcatWorkloadType, DataType>
        (factory, graph);

    auto wlSplitter = std::move(workloads.first);
    auto wlConcat   = std::move(workloads.second);

    // Checks that the index of inputs/outputs matches what we declared on InputDescriptor construction.
    armnn::CpuTensorHandle* sOut0 = dynamic_cast<armnn::CpuTensorHandle*>(wlSplitter->GetData().m_Outputs[0]);
    armnn::CpuTensorHandle* sOut1 = dynamic_cast<armnn::CpuTensorHandle*>(wlSplitter->GetData().m_Outputs[1]);
    armnn::CpuTensorHandle* mIn0  = dynamic_cast<armnn::CpuTensorHandle*>(wlConcat->GetData().m_Inputs[0]);
    armnn::CpuTensorHandle* mIn1  = dynamic_cast<armnn::CpuTensorHandle*>(wlConcat->GetData().m_Inputs[1]);

    BOOST_TEST(sOut0);
    BOOST_TEST(sOut1);
    BOOST_TEST(mIn0);
    BOOST_TEST(mIn1);

    bool validDataPointers = (sOut0 == mIn1) && (sOut1 == mIn0);

    BOOST_TEST(validDataPointers);
}

BOOST_AUTO_TEST_CASE(CreateSplitterConcatFloat32)
{
    RefCreateSplitterConcatWorkloadTest<RefSplitterWorkload, RefConcatWorkload, DataType::Float32>();
}

BOOST_AUTO_TEST_CASE(CreateSplitterConcatUint8)
{
    RefCreateSplitterConcatWorkloadTest<RefSplitterWorkload, RefConcatWorkload, DataType::QuantisedAsymm8>();
}

template <typename SplitterWorkloadType, typename ActivationWorkloadType, armnn::DataType DataType>
static void RefCreateSingleOutputMultipleInputsTest()
{
    // Tests that it is possible to assign multiple (two) different layers to each of the outputs of a splitter layer.
    // We create a splitter with two outputs, and each of those outputs is used by two different activation layers.

    Graph graph;
    RefWorkloadFactory factory;
    std::unique_ptr<SplitterWorkloadType> wlSplitter;
    std::unique_ptr<ActivationWorkloadType> wlActiv0_0;
    std::unique_ptr<ActivationWorkloadType> wlActiv0_1;
    std::unique_ptr<ActivationWorkloadType> wlActiv1_0;
    std::unique_ptr<ActivationWorkloadType> wlActiv1_1;

    CreateSplitterMultipleInputsOneOutputWorkloadTest<SplitterWorkloadType,
        ActivationWorkloadType, DataType>(factory, graph, wlSplitter, wlActiv0_0, wlActiv0_1, wlActiv1_0, wlActiv1_1);

    armnn::CpuTensorHandle* sOut0 = dynamic_cast<armnn::CpuTensorHandle*>(wlSplitter->GetData().m_Outputs[0]);
    armnn::CpuTensorHandle* sOut1 = dynamic_cast<armnn::CpuTensorHandle*>(wlSplitter->GetData().m_Outputs[1]);
    armnn::CpuTensorHandle* activ0_0Im = dynamic_cast<armnn::CpuTensorHandle*>(wlActiv0_0->GetData().m_Inputs[0]);
    armnn::CpuTensorHandle* activ0_1Im = dynamic_cast<armnn::CpuTensorHandle*>(wlActiv0_1->GetData().m_Inputs[0]);
    armnn::CpuTensorHandle* activ1_0Im = dynamic_cast<armnn::CpuTensorHandle*>(wlActiv1_0->GetData().m_Inputs[0]);
    armnn::CpuTensorHandle* activ1_1Im = dynamic_cast<armnn::CpuTensorHandle*>(wlActiv1_1->GetData().m_Inputs[0]);

    BOOST_TEST(sOut0);
    BOOST_TEST(sOut1);
    BOOST_TEST(activ0_0Im);
    BOOST_TEST(activ0_1Im);
    BOOST_TEST(activ1_0Im);
    BOOST_TEST(activ1_1Im);

    bool validDataPointers = (sOut0 == activ0_0Im) && (sOut0 == activ0_1Im) &&
                             (sOut1 == activ1_0Im) && (sOut1 == activ1_1Im);

    BOOST_TEST(validDataPointers);
}

BOOST_AUTO_TEST_CASE(CreateSingleOutputMultipleInputsFloat32)
{
    RefCreateSingleOutputMultipleInputsTest<RefSplitterWorkload, RefActivationWorkload,
        armnn::DataType::Float32>();
}

BOOST_AUTO_TEST_CASE(CreateSingleOutputMultipleInputsUint8)
{
    RefCreateSingleOutputMultipleInputsTest<RefSplitterWorkload, RefActivationWorkload,
        armnn::DataType::QuantisedAsymm8>();
}

template <typename ResizeBilinearWorkloadType, armnn::DataType DataType>
static void RefCreateResizeBilinearTest(DataLayout dataLayout)
{
    Graph graph;
    RefWorkloadFactory factory;
    auto workload = CreateResizeBilinearWorkloadTest<ResizeBilinearWorkloadType, DataType>(factory, graph, dataLayout);

    TensorShape inputShape;
    TensorShape outputShape;

    switch (dataLayout)
    {
        case DataLayout::NHWC:
            inputShape  = { 2, 4, 4, 3 };
            outputShape = { 2, 2, 2, 3 };
            break;
        case DataLayout::NCHW:
        default:
            inputShape  = { 2, 3, 4, 4 };
            outputShape = { 2, 3, 2, 2 };
    }

    // Checks that outputs and inputs are as we expect them (see definition of CreateResizeBilinearWorkloadTest).
    CheckInputOutput(std::move(workload),
                     TensorInfo(inputShape, DataType),
                     TensorInfo(outputShape, DataType));
}

BOOST_AUTO_TEST_CASE(CreateResizeBilinearFloat32)
{
    RefCreateResizeBilinearTest<RefResizeBilinearFloat32Workload, armnn::DataType::Float32>(DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(CreateResizeBilinearUint8)
{
    RefCreateResizeBilinearTest<RefResizeBilinearUint8Workload, armnn::DataType::QuantisedAsymm8>(DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(CreateResizeBilinearFloat32Nhwc)
{
    RefCreateResizeBilinearTest<RefResizeBilinearFloat32Workload, armnn::DataType::Float32>(DataLayout::NHWC);
}

template <typename RsqrtWorkloadType, armnn::DataType DataType>
static void RefCreateRsqrtTest()
{
    Graph graph;
    RefWorkloadFactory factory;

    auto workload = CreateRsqrtWorkloadTest<RsqrtWorkloadType, DataType>(factory, graph);

    // Checks that outputs are as we expect them (see definition of CreateRsqrtWorkloadTest).
    CheckInputOutput(std::move(workload),
                     TensorInfo({ 1, 1 }, DataType),
                     TensorInfo({ 1, 1 }, DataType));
}

BOOST_AUTO_TEST_CASE(CreateRsqrtFloat32)
{
    RefCreateRsqrtTest<RefRsqrtFloat32Workload, armnn::DataType::Float32>();
}

template <typename L2NormalizationWorkloadType, armnn::DataType DataType>
static void RefCreateL2NormalizationTest(DataLayout dataLayout)
{
    Graph graph;
    RefWorkloadFactory factory;
    auto workload =
        CreateL2NormalizationWorkloadTest<L2NormalizationWorkloadType, DataType>(factory, graph, dataLayout);

    TensorShape inputShape;
    TensorShape outputShape;

    switch (dataLayout)
    {
        case DataLayout::NHWC:
            inputShape  = { 5, 50, 67, 20 };
            outputShape = { 5, 50, 67, 20 };
            break;
        case DataLayout::NCHW:
        default:
            inputShape  = { 5, 20, 50, 67 };
            outputShape = { 5, 20, 50, 67 };
            break;
    }

    // Checks that outputs and inputs are as we expect them (see definition of CreateL2NormalizationWorkloadTest).
    CheckInputOutput(std::move(workload), TensorInfo(inputShape, DataType), TensorInfo(outputShape, DataType));
}

BOOST_AUTO_TEST_CASE(CreateL2NormalizationFloat32)
{
    RefCreateL2NormalizationTest<RefL2NormalizationFloat32Workload, armnn::DataType::Float32>(DataLayout::NCHW);
}

BOOST_AUTO_TEST_CASE(CreateL2NormalizationFloat32Nhwc)
{
    RefCreateL2NormalizationTest<RefL2NormalizationFloat32Workload, armnn::DataType::Float32>(DataLayout::NHWC);
}

template <typename ReshapeWorkloadType, armnn::DataType DataType>
static void RefCreateReshapeWorkloadTest()
{
    Graph graph;
    RefWorkloadFactory factory;
    auto workload = CreateReshapeWorkloadTest<ReshapeWorkloadType, DataType>(factory, graph);

    // Checks that outputs and inputs are as we expect them (see definition of CreateReshapeWorkloadTest).
    CheckInputOutput(
        std::move(workload),
        TensorInfo({ 4, 1 }, DataType),
        TensorInfo({ 1, 4 }, DataType));
}

BOOST_AUTO_TEST_CASE(CreateReshapeWorkloadFloat32)
{
    RefCreateReshapeWorkloadTest<RefReshapeWorkload, armnn::DataType::Float32>();
}

BOOST_AUTO_TEST_CASE(CreateReshapeWorkloadQuantisedAsymm8)
{
    RefCreateReshapeWorkloadTest<RefReshapeWorkload, armnn::DataType::QuantisedAsymm8>();
}

BOOST_AUTO_TEST_CASE(CreateReshapeWorkloadQuantisedSymm16)
{
    RefCreateReshapeWorkloadTest<RefReshapeWorkload, armnn::DataType::QuantisedSymm16>();
}

template <typename ConcatWorkloadType, armnn::DataType DataType>
static void RefCreateConcatWorkloadTest(const armnn::TensorShape& outputShape,
                                        unsigned int concatAxis)
{
    Graph graph;
    RefWorkloadFactory factory;
    auto workload = CreateConcatWorkloadTest<ConcatWorkloadType, DataType>(factory, graph, outputShape, concatAxis);

    CheckInputsOutput(std::move(workload),
                      TensorInfo({ 2, 3, 2, 5 }, DataType),
                      TensorInfo({ 2, 3, 2, 5 }, DataType),
                      TensorInfo(outputShape, DataType));
}

BOOST_AUTO_TEST_CASE(CreateConcatDim0Float32Workload)
{
    RefCreateConcatWorkloadTest<RefConcatWorkload, armnn::DataType::Float32>({ 4, 3, 2, 5 }, 0);
}

BOOST_AUTO_TEST_CASE(CreateConcatDim0Uint8Workload)
{
    RefCreateConcatWorkloadTest<RefConcatWorkload, armnn::DataType::QuantisedAsymm8>({ 4, 3, 2, 5 }, 0);
}

BOOST_AUTO_TEST_CASE(CreateConcatDim0Uint16Workload)
{
    RefCreateConcatWorkloadTest<RefConcatWorkload, armnn::DataType::QuantisedSymm16>({ 4, 3, 2, 5 }, 0);
}

BOOST_AUTO_TEST_CASE(CreateConcatDim1Float32Workload)
{
    RefCreateConcatWorkloadTest<RefConcatWorkload, armnn::DataType::Float32>({ 2, 6, 2, 5 }, 1);
}

BOOST_AUTO_TEST_CASE(CreateConcatDim1Uint8Workload)
{
    RefCreateConcatWorkloadTest<RefConcatWorkload, armnn::DataType::QuantisedAsymm8>({ 2, 6, 2, 5 }, 1);
}

BOOST_AUTO_TEST_CASE(CreateConcatDim2Float32Workload)
{
    RefCreateConcatWorkloadTest<RefConcatWorkload, armnn::DataType::Float32>({ 2, 3, 4, 5 }, 2);
}

BOOST_AUTO_TEST_CASE(CreateConcatDim2Uint8Workload)
{
    RefCreateConcatWorkloadTest<RefConcatWorkload, armnn::DataType::QuantisedAsymm8>({ 2, 3, 4, 5 }, 2);
}

BOOST_AUTO_TEST_CASE(CreateConcatDim3Float32Workload)
{
    RefCreateConcatWorkloadTest<RefConcatWorkload, armnn::DataType::Float32>({ 2, 3, 2, 10 }, 3);
}

BOOST_AUTO_TEST_CASE(CreateConcatDim3Uint8Workload)
{
    RefCreateConcatWorkloadTest<RefConcatWorkload, armnn::DataType::QuantisedAsymm8>({ 2, 3, 2, 10 }, 3);
}

template <typename ConstantWorkloadType, armnn::DataType DataType>
static void RefCreateConstantWorkloadTest(const armnn::TensorShape& outputShape)
{
    armnn::Graph graph;
    RefWorkloadFactory factory;
    auto workload = CreateConstantWorkloadTest<ConstantWorkloadType, DataType>(factory, graph, outputShape);

    // Check output is as expected
    auto queueDescriptor = workload->GetData();
    auto outputHandle = boost::polymorphic_downcast<CpuTensorHandle*>(queueDescriptor.m_Outputs[0]);
    BOOST_TEST((outputHandle->GetTensorInfo() == TensorInfo(outputShape, DataType)));
}

BOOST_AUTO_TEST_CASE(CreateConstantUint8Workload)
{
    RefCreateConstantWorkloadTest<RefConstantWorkload, armnn::DataType::QuantisedAsymm8>({ 2, 3, 2, 10 });
}

BOOST_AUTO_TEST_CASE(CreateConstantInt16Workload)
{
    RefCreateConstantWorkloadTest<RefConstantWorkload, armnn::DataType::QuantisedSymm16>({ 2, 3, 2, 10 });
}

BOOST_AUTO_TEST_CASE(CreateConstantFloat32Workload)
{
    RefCreateConstantWorkloadTest<RefConstantWorkload, armnn::DataType::Float32>({ 2, 3, 2, 10 });
}

BOOST_AUTO_TEST_CASE(CreateConstantSigned32Workload)
{
    RefCreateConstantWorkloadTest<RefConstantWorkload, armnn::DataType::Signed32>({ 2, 3, 2, 10 });
}

BOOST_AUTO_TEST_SUITE_END()