//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//
#pragma once

#include <Graph.hpp>

#include <backendsCommon/WorkloadFactory.hpp>

#include <boost/core/ignore_unused.hpp>

namespace
{
armnn::Graph dummyGraph;

// Make a dummy TensorInfo object.
template<armnn::DataType DataType>
armnn::TensorInfo MakeDummyTensorInfo()
{
    return armnn::TensorInfo({2,2,2,2}, DataType);
}

// Make a dummy WorkloadInfo using a dummy TensorInfo.
template<armnn::DataType DataType>
armnn::WorkloadInfo MakeDummyWorkloadInfo(unsigned int numInputs, unsigned int numOutputs)
{
    armnn::WorkloadInfo info;
    for (unsigned int i=0; i < numInputs; i++)
    {
        info.m_InputTensorInfos.push_back(MakeDummyTensorInfo<DataType>());
    }
    for (unsigned int o=0; o < numOutputs; o++)
    {
        info.m_OutputTensorInfos.push_back(MakeDummyTensorInfo<DataType>());
    }
    return info;
}

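// Usage sketch (illustration only): a two-input, one-output description for a
// Float32 workload would be built as
//   armnn::WorkloadInfo info = MakeDummyWorkloadInfo<armnn::DataType::Float32>(2, 1);
// leaving info.m_InputTensorInfos with two entries and info.m_OutputTensorInfos
// with one.
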
// Template class to create a dummy layer (2 parameters).
template<typename LayerType, typename DescType = typename LayerType::DescriptorType>
struct DummyLayer
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<LayerType>(DescType(), "");
    }
    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }
    LayerType* m_Layer;
};

// Template class to create a dummy layer (1 parameter).
template<typename LayerType>
struct DummyLayer<LayerType, void>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<LayerType>("");
    }
    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }
    LayerType* m_Layer;
};

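// Usage sketch (illustration only): layers constructed from a descriptor use the
// primary template, e.g. DummyLayer<armnn::SoftmaxLayer>, while layers whose
// AddLayer overload takes only a name use the void specialization, e.g.
// DummyLayer<armnn::FloorLayer, void>.
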
template<>
struct DummyLayer<armnn::BatchNormalizationLayer>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::BatchNormalizationLayer>(armnn::BatchNormalizationDescriptor(), "");
        m_Layer->m_Mean = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_Variance = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_Beta = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_Gamma = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
    }
    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }
    armnn::BatchNormalizationLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::BatchToSpaceNdLayer>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::BatchToSpaceNdLayer>(armnn::BatchToSpaceNdDescriptor(), "");
    }
    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }
    armnn::BatchToSpaceNdLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::ConstantLayer, void>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::ConstantLayer>("");
    }
    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }
    armnn::ConstantLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::InputLayer, armnn::LayerBindingId>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::InputLayer>(armnn::LayerBindingId(), "");
    }
    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }
    armnn::InputLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::ConcatLayer>
{
    DummyLayer()
    {
        armnn::OriginsDescriptor desc(2);
        m_Layer = dummyGraph.AddLayer<armnn::ConcatLayer>(desc, "");
    }
    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }
    armnn::ConcatLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::OutputLayer, armnn::LayerBindingId>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::OutputLayer>(armnn::LayerBindingId(), "");
    }
    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }
    armnn::OutputLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::SplitterLayer>
{
    DummyLayer()
    {
        armnn::ViewsDescriptor desc(1);
        m_Layer = dummyGraph.AddLayer<armnn::SplitterLayer>(desc, "");
    }
    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }
    armnn::SplitterLayer* m_Layer;
};

template <typename ConvolutionLayerType>
struct DummyConvolutionLayer
{
    DummyConvolutionLayer()
    {
        typename ConvolutionLayerType::DescriptorType desc;
        m_Layer = dummyGraph.AddLayer<ConvolutionLayerType>(desc, "");
        m_Layer->m_Weight = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_Bias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
    }
    ~DummyConvolutionLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }
    ConvolutionLayerType* m_Layer;
};

template<>
struct DummyLayer<armnn::Convolution2dLayer>
    : public DummyConvolutionLayer<armnn::Convolution2dLayer>
{
};

template<>
struct DummyLayer<armnn::DepthwiseConvolution2dLayer>
    : public DummyConvolutionLayer<armnn::DepthwiseConvolution2dLayer>
{
};

template<>
struct DummyLayer<armnn::TransposeConvolution2dLayer>
    : public DummyConvolutionLayer<armnn::TransposeConvolution2dLayer>
{
};

template <typename LstmLayerType>
struct DummyLstmLayer
{
    DummyLstmLayer()
    {
        typename LstmLayerType::DescriptorType desc;
        desc.m_CifgEnabled = false;

        m_Layer = dummyGraph.AddLayer<LstmLayerType>(desc, "");
        m_Layer->m_BasicParameters.m_InputToForgetWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_InputToCellWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_InputToOutputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_RecurrentToForgetWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_RecurrentToCellWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_RecurrentToOutputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_ForgetGateBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_CellBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_OutputGateBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));

        // CIFG is disabled, so the input-gate parameters must be present as well.
        m_Layer->m_CifgParameters.m_InputToInputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_CifgParameters.m_RecurrentToInputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_CifgParameters.m_CellToInputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_CifgParameters.m_InputGateBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
    }
    ~DummyLstmLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }
    armnn::LstmLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::LstmLayer>
    : public DummyLstmLayer<armnn::LstmLayer>
{
};

template<>
struct DummyLayer<armnn::FullyConnectedLayer>
{
    DummyLayer()
    {
        armnn::FullyConnectedLayer::DescriptorType desc;
        m_Layer = dummyGraph.AddLayer<armnn::FullyConnectedLayer>(desc, "");
        m_Layer->m_Weight = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
    }
    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }
    armnn::FullyConnectedLayer* m_Layer;
};

// Tag for giving each LayerType entry its own unique strong type.
template<armnn::LayerType>
struct Tag{};

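// Tag<armnn::LayerType::Activation> and Tag<armnn::LayerType::Softmax> are, for
// example, distinct types, which lets the recursive test functions below select
// overloads per layer type at compile time (tag dispatch).
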
#define DECLARE_LAYER_POLICY_CUSTOM_PARAM(name, descType) \
template<armnn::DataType DataType> \
struct LayerTypePolicy<armnn::LayerType::name, DataType> \
{ \
    using Type = armnn::name##Layer; \
    using Desc = descType; \
    using QueueDesc = armnn::name##QueueDescriptor; \
    constexpr static const char* NameStr = #name; \
    \
    static std::unique_ptr<armnn::IWorkload> MakeDummyWorkload(armnn::IWorkloadFactory *factory, \
                                                               unsigned int nIn, unsigned int nOut) \
    { \
        QueueDesc desc; \
        armnn::WorkloadInfo info = MakeDummyWorkloadInfo<DataType>(nIn, nOut); \
        return factory->Create##name(desc, info); \
    } \
};

// Define a layer policy specialization for use with the IsLayerSupported tests.
// Use this version for layers whose constructor takes one parameter (the name).
#define DECLARE_LAYER_POLICY_1_PARAM(name) DECLARE_LAYER_POLICY_CUSTOM_PARAM(name, void)

// Define a layer policy specialization for use with the IsLayerSupported tests.
// Use this version for layers whose constructor takes two parameters (a descriptor and the name).
#define DECLARE_LAYER_POLICY_2_PARAM(name) DECLARE_LAYER_POLICY_CUSTOM_PARAM(name, armnn::name##Descriptor)

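// For reference, DECLARE_LAYER_POLICY_2_PARAM(Activation) below expands to a
// specialization roughly equivalent to:
//
//   template<armnn::DataType DataType>
//   struct LayerTypePolicy<armnn::LayerType::Activation, DataType>
//   {
//       using Type = armnn::ActivationLayer;
//       using Desc = armnn::ActivationDescriptor;
//       using QueueDesc = armnn::ActivationQueueDescriptor;
//       constexpr static const char* NameStr = "Activation";
//       ...
//   };
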
// Layer policy template.
template<armnn::LayerType Type, armnn::DataType DataType>
struct LayerTypePolicy;

// Every entry in the armnn::LayerType enum must be accounted for below.
DECLARE_LAYER_POLICY_2_PARAM(Activation)

DECLARE_LAYER_POLICY_1_PARAM(Addition)

DECLARE_LAYER_POLICY_2_PARAM(BatchNormalization)

DECLARE_LAYER_POLICY_2_PARAM(BatchToSpaceNd)

DECLARE_LAYER_POLICY_2_PARAM(Concat)

DECLARE_LAYER_POLICY_1_PARAM(Constant)

DECLARE_LAYER_POLICY_1_PARAM(ConvertFp16ToFp32)

DECLARE_LAYER_POLICY_1_PARAM(ConvertFp32ToFp16)

DECLARE_LAYER_POLICY_2_PARAM(Convolution2d)

DECLARE_LAYER_POLICY_1_PARAM(MemCopy)

DECLARE_LAYER_POLICY_1_PARAM(Debug)

DECLARE_LAYER_POLICY_2_PARAM(DepthwiseConvolution2d)

DECLARE_LAYER_POLICY_1_PARAM(Dequantize)

DECLARE_LAYER_POLICY_2_PARAM(DetectionPostProcess)

DECLARE_LAYER_POLICY_1_PARAM(Equal)

DECLARE_LAYER_POLICY_2_PARAM(FakeQuantization)

DECLARE_LAYER_POLICY_1_PARAM(Floor)

DECLARE_LAYER_POLICY_2_PARAM(FullyConnected)

DECLARE_LAYER_POLICY_1_PARAM(Gather)

DECLARE_LAYER_POLICY_1_PARAM(Greater)

DECLARE_LAYER_POLICY_CUSTOM_PARAM(Input, armnn::LayerBindingId)

DECLARE_LAYER_POLICY_2_PARAM(L2Normalization)

DECLARE_LAYER_POLICY_2_PARAM(Lstm)

DECLARE_LAYER_POLICY_1_PARAM(Maximum)

DECLARE_LAYER_POLICY_2_PARAM(Mean)

DECLARE_LAYER_POLICY_1_PARAM(Merge)

DECLARE_LAYER_POLICY_1_PARAM(Minimum)

DECLARE_LAYER_POLICY_1_PARAM(Multiplication)

DECLARE_LAYER_POLICY_2_PARAM(Normalization)

DECLARE_LAYER_POLICY_CUSTOM_PARAM(Output, armnn::LayerBindingId)

DECLARE_LAYER_POLICY_2_PARAM(Pad)

DECLARE_LAYER_POLICY_1_PARAM(Quantize)

DECLARE_LAYER_POLICY_2_PARAM(Permute)

DECLARE_LAYER_POLICY_2_PARAM(Pooling2d)

DECLARE_LAYER_POLICY_2_PARAM(PreCompiled)

DECLARE_LAYER_POLICY_1_PARAM(Prelu)

DECLARE_LAYER_POLICY_1_PARAM(Division)

DECLARE_LAYER_POLICY_2_PARAM(Resize)

DECLARE_LAYER_POLICY_2_PARAM(ResizeBilinear)

DECLARE_LAYER_POLICY_2_PARAM(Reshape)

DECLARE_LAYER_POLICY_1_PARAM(Rsqrt)

DECLARE_LAYER_POLICY_2_PARAM(Softmax)

DECLARE_LAYER_POLICY_2_PARAM(SpaceToBatchNd)

DECLARE_LAYER_POLICY_2_PARAM(SpaceToDepth)

DECLARE_LAYER_POLICY_2_PARAM(Splitter)

DECLARE_LAYER_POLICY_2_PARAM(StridedSlice)

DECLARE_LAYER_POLICY_1_PARAM(Subtraction)

DECLARE_LAYER_POLICY_1_PARAM(Switch)

DECLARE_LAYER_POLICY_2_PARAM(TransposeConvolution2d)

// Generic implementation to get the number of input slots for a given layer type.
template<armnn::LayerType Type>
unsigned int GetNumInputs(const armnn::Layer& layer)
{
    return layer.GetNumInputSlots();
}

// Generic implementation to get the number of output slots for a given layer type.
template<armnn::LayerType Type>
unsigned int GetNumOutputs(const armnn::Layer& layer)
{
    return layer.GetNumOutputSlots();
}

template<>
unsigned int GetNumInputs<armnn::LayerType::Concat>(const armnn::Layer& layer)
{
    boost::ignore_unused(layer);
    return 2;
}

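// The Concat override matches the DummyLayer<armnn::ConcatLayer> specialization
// above, which constructs the layer with armnn::OriginsDescriptor(2) and hence
// two input slots.
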
// Tests that the IsLayerSupported() function returns the correct value.
// We determine the correct value by *trying* to create the relevant workload and seeing if it matches what we expect.
// Returns true if expectations are met, otherwise returns false.
template<typename FactoryType, armnn::DataType DataType, armnn::LayerType Type>
bool IsLayerSupportedTest(FactoryType *factory, Tag<Type>)
{
    using LayerPolicy = LayerTypePolicy<Type, DataType>;
    using LayerType = typename LayerPolicy::Type;
    using LayerDesc = typename LayerPolicy::Desc;
    DummyLayer<LayerType, LayerDesc> layer;

    unsigned int numIn = GetNumInputs<Type>(*layer.m_Layer);
    unsigned int numOut = GetNumOutputs<Type>(*layer.m_Layer);

    // Make another dummy layer just to make IsLayerSupported have valid inputs.
    DummyLayer<armnn::ConstantLayer, void> previousLayer;
    // Set output of the previous layer to a dummy tensor.
    armnn::TensorInfo output = MakeDummyTensorInfo<DataType>();
    previousLayer.m_Layer->GetOutputSlot(0).SetTensorInfo(output);
    // Connect the previous layer's output to every input of the tested layer.
    for (unsigned int i = 0; i < numIn; i++)
    {
        armnn::IOutputSlot& previousLayerOutputSlot = previousLayer.m_Layer->GetOutputSlot(0);
        armnn::IInputSlot& layerInputSlot = layer.m_Layer->GetInputSlot(i);
        previousLayerOutputSlot.Connect(layerInputSlot);
    }
    // Set all outputs of the tested layer to a dummy tensor.
    for (unsigned int i = 0; i < numOut; i++)
    {
        layer.m_Layer->GetOutputSlot(i).SetTensorInfo(output);
    }

    std::string layerName = LayerPolicy::NameStr;
    std::string reasonIfUnsupported;
    if (FactoryType::IsLayerSupported(*layer.m_Layer, DataType, reasonIfUnsupported))
    {
        std::string errorMsg = " layer expected support but found none.";
        try
        {
            bool retVal = LayerPolicy::MakeDummyWorkload(factory, numIn, numOut).get() != nullptr;
            BOOST_CHECK_MESSAGE(retVal, layerName << errorMsg);
            return retVal;
        }
        catch(const armnn::InvalidArgumentException& e)
        {
            boost::ignore_unused(e);
            // This is ok since we throw InvalidArgumentException when creating the dummy workload.
            return true;
        }
        catch(const std::exception& e)
        {
            errorMsg = e.what();
            BOOST_TEST_ERROR(layerName << ": " << errorMsg);
            return false;
        }
        catch(...)
        {
            errorMsg = "Unexpected error while testing support for ";
            BOOST_TEST_ERROR(errorMsg << layerName);
            return false;
        }
    }
    else
    {
        std::string errorMsg = " layer expected no support (giving reason: " + reasonIfUnsupported + ") but found some.";
        try
        {
            bool retVal = LayerPolicy::MakeDummyWorkload(factory, numIn, numOut).get() == nullptr;
            BOOST_CHECK_MESSAGE(retVal, layerName << errorMsg);
            return retVal;
        }
        // These two exceptions are ok: For workloads that are partially supported, attempting to instantiate them
        // using parameters that make IsLayerSupported() return false should throw an
        // InvalidArgumentException or UnimplementedException.
        catch(const armnn::InvalidArgumentException& e)
        {
            boost::ignore_unused(e);
            return true;
        }
        catch(const armnn::UnimplementedException& e)
        {
            boost::ignore_unused(e);
            return true;
        }
        catch(const std::exception& e)
        {
            errorMsg = e.what();
            BOOST_TEST_ERROR(layerName << ": " << errorMsg);
            return false;
        }
        catch(...)
        {
            errorMsg = "Unexpected error while testing support for ";
            BOOST_TEST_ERROR(errorMsg << layerName);
            return false;
        }
    }
}

// Helper function to compute the next type in the LayerType enum.
constexpr armnn::LayerType NextType(armnn::LayerType type)
{
    return static_cast<armnn::LayerType>(static_cast<int>(type)+1);
}

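// For example, NextType(armnn::LayerType::FirstLayer) yields the enum entry
// immediately after FirstLayer; the recursive overloads below use this to walk
// the entire enum at compile time, stopping at LastLayer.
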
// Termination function for the recursion: tests the last entry in the LayerType enumeration.
template<typename FactoryType, armnn::DataType DataType, armnn::LayerType Type>
bool IsLayerSupportedTestsImpl(FactoryType *factory, Tag<armnn::LayerType::LastLayer>)
{
    return IsLayerSupportedTest<FactoryType, DataType, Type>(factory, Tag<Type>());
}

// Recursive function to test an entry in the LayerType enum and then iterate on to the next entry.
template<typename FactoryType, armnn::DataType DataType, armnn::LayerType Type>
bool IsLayerSupportedTestsImpl(FactoryType *factory, Tag<Type>)
{
    bool v = IsLayerSupportedTest<FactoryType, DataType, Type>(factory, Tag<Type>());

    return v &&
           IsLayerSupportedTestsImpl<FactoryType, DataType, NextType(Type)>
               (factory, Tag<NextType(Type)>());
}

// Helper function to pass through to the test framework.
template<typename FactoryType, armnn::DataType DataType>
bool IsLayerSupportedTests(FactoryType *factory)
{
    return IsLayerSupportedTestsImpl<FactoryType, DataType>(factory, Tag<armnn::LayerType::FirstLayer>());
}

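// Usage sketch (assuming a backend workload factory such as the reference
// backend's; illustration only):
//   armnn::RefWorkloadFactory factory;
//   bool allChecked = IsLayerSupportedTests<armnn::RefWorkloadFactory,
//                                           armnn::DataType::Float32>(&factory);
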
template<armnn::LayerType Type>
bool TestLayerTypeMatches()
{
    using LayerPolicy = LayerTypePolicy<Type, armnn::DataType::Float32>;
    using LayerType = typename LayerPolicy::Type;
    using LayerDesc = typename LayerPolicy::Desc;
    DummyLayer<LayerType, LayerDesc> layer;

    std::stringstream ss;
    ss << LayerPolicy::NameStr << " layer type does not match the expected LayerType value.";
    bool v = Type == layer.m_Layer->GetType();
    BOOST_CHECK_MESSAGE(v, ss.str());
    return v;
}

template<armnn::LayerType Type>
bool LayerTypeMatchesTestImpl(Tag<armnn::LayerType::LastLayer>)
{
    return TestLayerTypeMatches<Type>();
}

template<armnn::LayerType Type>
bool LayerTypeMatchesTestImpl(Tag<Type>)
{
    return TestLayerTypeMatches<Type>() &&
           LayerTypeMatchesTestImpl<NextType(Type)>(Tag<NextType(Type)>());
}

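// Entry point sketch for the recursion above (illustration only):
//   bool ok = LayerTypeMatchesTestImpl<armnn::LayerType::FirstLayer>(
//       Tag<armnn::LayerType::FirstLayer>());
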
template<typename FactoryType, typename LayerType, armnn::DataType InputDataType, armnn::DataType OutputDataType>
bool IsConvertLayerSupportedTests(std::string& reasonIfUnsupported)
{
    armnn::Graph graph;
    LayerType* const layer = graph.AddLayer<LayerType>("LayerName");

    armnn::Layer* const input = graph.AddLayer<armnn::InputLayer>(0, "input");
    armnn::Layer* const output = graph.AddLayer<armnn::OutputLayer>(0, "output");

    armnn::TensorInfo inputTensorInfo({1, 3, 2, 3}, InputDataType);
    armnn::TensorInfo outputTensorInfo({1, 3, 2, 3}, OutputDataType);

    input->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
    input->GetOutputHandler(0).SetTensorInfo(inputTensorInfo);
    layer->GetOutputSlot(0).Connect(output->GetInputSlot(0));
    layer->GetOutputHandler(0).SetTensorInfo(outputTensorInfo);

    bool result = FactoryType::IsLayerSupported(*layer, InputDataType, reasonIfUnsupported);

    return result;
}

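// Usage sketch (illustration only; the template-argument combination is an
// assumed example):
//   std::string reason;
//   bool supported = IsConvertLayerSupportedTests<armnn::RefWorkloadFactory,
//       armnn::ConvertFp16ToFp32Layer, armnn::DataType::Float16,
//       armnn::DataType::Float32>(reason);
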
template<typename FactoryType, armnn::DataType InputDataType, armnn::DataType OutputDataType>
bool IsMeanLayerSupportedTests(std::string& reasonIfUnsupported)
{
    armnn::Graph graph;
    static const std::vector<unsigned> axes = {1, 0};
    armnn::MeanDescriptor desc(axes, false);

    armnn::Layer* const layer = graph.AddLayer<armnn::MeanLayer>(desc, "LayerName");

    armnn::Layer* const input = graph.AddLayer<armnn::InputLayer>(0, "input");
    armnn::Layer* const output = graph.AddLayer<armnn::OutputLayer>(0, "output");

    armnn::TensorInfo inputTensorInfo({4, 3, 2}, InputDataType);
    armnn::TensorInfo outputTensorInfo({2}, OutputDataType);

    input->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
    input->GetOutputHandler(0).SetTensorInfo(inputTensorInfo);
    layer->GetOutputSlot(0).Connect(output->GetInputSlot(0));
    layer->GetOutputHandler(0).SetTensorInfo(outputTensorInfo);

    bool result = FactoryType::IsLayerSupported(*layer, InputDataType, reasonIfUnsupported);

    return result;
}

// Tests that IsMeanSupported fails when input tensor dimensions
// do not match output tensor dimensions when keepDims == true.
template<typename FactoryType, armnn::DataType InputDataType, armnn::DataType OutputDataType>
bool IsMeanLayerNotSupportedTests(std::string& reasonIfUnsupported)
{
    armnn::Graph graph;
    static const std::vector<unsigned> axes = {};
    // Set keepDims == true.
    armnn::MeanDescriptor desc(axes, true);

    armnn::Layer* const layer = graph.AddLayer<armnn::MeanLayer>(desc, "LayerName");

    armnn::Layer* const input = graph.AddLayer<armnn::InputLayer>(0, "input");
    armnn::Layer* const output = graph.AddLayer<armnn::OutputLayer>(0, "output");

    // Mismatching number of tensor dimensions.
    armnn::TensorInfo inputTensorInfo({1, 1, 1, 1}, InputDataType);
    armnn::TensorInfo outputTensorInfo({1, 1}, OutputDataType);

    input->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
    input->GetOutputHandler(0).SetTensorInfo(inputTensorInfo);
    layer->GetOutputSlot(0).Connect(output->GetInputSlot(0));
    layer->GetOutputHandler(0).SetTensorInfo(outputTensorInfo);

    bool result = FactoryType::IsLayerSupported(*layer, InputDataType, reasonIfUnsupported);

    return result;
}

} //namespace