//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//
#pragma once

#include <Graph.hpp>

#include <backendsCommon/WorkloadFactory.hpp>

#include <boost/core/ignore_unused.hpp>

namespace
{
armnn::Graph dummyGraph;

// Make a dummy TensorInfo object.
template<armnn::DataType DataType>
armnn::TensorInfo MakeDummyTensorInfo()
{
    return armnn::TensorInfo({2,2,2,2}, DataType);
}

// Make a dummy WorkloadInfo using a dummy TensorInfo.
template<armnn::DataType DataType>
armnn::WorkloadInfo MakeDummyWorkloadInfo(unsigned int numInputs, unsigned int numOutputs)
{
    armnn::WorkloadInfo info;

    for (unsigned int i = 0; i < numInputs; i++)
    {
        info.m_InputTensorInfos.push_back(MakeDummyTensorInfo<DataType>());
    }

    for (unsigned int o = 0; o < numOutputs; o++)
    {
        info.m_OutputTensorInfos.push_back(MakeDummyTensorInfo<DataType>());
    }

    return info;
}

// Template class to create a dummy layer (2 parameters).
template<typename LayerType, typename DescType = typename LayerType::DescriptorType>
struct DummyLayer
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<LayerType>(DescType(), "");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    LayerType* m_Layer;
};

// Template class to create a dummy layer (1 parameter).
template<typename LayerType>
struct DummyLayer<LayerType, void>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<LayerType>("");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    LayerType* m_Layer;
};

template<>
struct DummyLayer<armnn::BatchNormalizationLayer>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::BatchNormalizationLayer>(armnn::BatchNormalizationDescriptor(), "");
        m_Layer->m_Mean = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_Variance = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_Beta = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_Gamma = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::BatchNormalizationLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::BatchToSpaceNdLayer>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::BatchToSpaceNdLayer>(armnn::BatchToSpaceNdDescriptor(), "");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::BatchToSpaceNdLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::ConstantLayer, void>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::ConstantLayer>("");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::ConstantLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::InputLayer, armnn::LayerBindingId>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::InputLayer>(armnn::LayerBindingId(), "");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::InputLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::ConcatLayer>
{
    DummyLayer()
    {
        armnn::OriginsDescriptor desc(2);
        m_Layer = dummyGraph.AddLayer<armnn::ConcatLayer>(desc, "");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::ConcatLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::OutputLayer, armnn::LayerBindingId>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::OutputLayer>(armnn::LayerBindingId(), "");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::OutputLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::SplitterLayer>
{
    DummyLayer()
    {
        armnn::ViewsDescriptor desc(1);
        m_Layer = dummyGraph.AddLayer<armnn::SplitterLayer>(desc, "");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::SplitterLayer* m_Layer;
};

template <typename ConvolutionLayerType>
struct DummyConvolutionLayer
{
    DummyConvolutionLayer()
    {
        typename ConvolutionLayerType::DescriptorType desc;
        m_Layer = dummyGraph.AddLayer<ConvolutionLayerType>(desc, "");
        m_Layer->m_Weight = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_Bias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
    }

    ~DummyConvolutionLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    ConvolutionLayerType* m_Layer;
};

template<>
struct DummyLayer<armnn::Convolution2dLayer>
    : public DummyConvolutionLayer<armnn::Convolution2dLayer>
{
};

template<>
struct DummyLayer<armnn::DepthwiseConvolution2dLayer>
    : public DummyConvolutionLayer<armnn::DepthwiseConvolution2dLayer>
{
};

template<>
struct DummyLayer<armnn::TransposeConvolution2dLayer>
    : public DummyConvolutionLayer<armnn::TransposeConvolution2dLayer>
{
};

template <typename LstmLayerType>
struct DummyLstmLayer
{
    DummyLstmLayer()
    {
        typename LstmLayerType::DescriptorType desc;
        desc.m_CifgEnabled = false;

        m_Layer = dummyGraph.AddLayer<LstmLayerType>(desc, "");
        m_Layer->m_BasicParameters.m_InputToForgetWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_InputToCellWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_InputToOutputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_RecurrentToForgetWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_RecurrentToCellWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_RecurrentToOutputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_ForgetGateBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_CellBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_OutputGateBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));

        // CIFG is disabled in the descriptor above, so the input-gate parameters are populated too.
        m_Layer->m_CifgParameters.m_InputToInputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_CifgParameters.m_RecurrentToInputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_CifgParameters.m_CellToInputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_CifgParameters.m_InputGateBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
    }

    ~DummyLstmLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::LstmLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::LstmLayer>
    : public DummyLstmLayer<armnn::LstmLayer>
{
};

template<>
struct DummyLayer<armnn::QuantizedLstmLayer, void>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::QuantizedLstmLayer>("");

        m_Layer->m_QuantizedLstmParameters.m_InputToInputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QuantisedAsymm8));
        m_Layer->m_QuantizedLstmParameters.m_InputToForgetWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QuantisedAsymm8));
        m_Layer->m_QuantizedLstmParameters.m_InputToCellWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QuantisedAsymm8));
        m_Layer->m_QuantizedLstmParameters.m_InputToOutputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QuantisedAsymm8));

        m_Layer->m_QuantizedLstmParameters.m_RecurrentToInputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QuantisedAsymm8));
        m_Layer->m_QuantizedLstmParameters.m_RecurrentToForgetWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QuantisedAsymm8));
        m_Layer->m_QuantizedLstmParameters.m_RecurrentToCellWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QuantisedAsymm8));
        m_Layer->m_QuantizedLstmParameters.m_RecurrentToOutputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QuantisedAsymm8));

        m_Layer->m_QuantizedLstmParameters.m_InputGateBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));
        m_Layer->m_QuantizedLstmParameters.m_ForgetGateBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));
        m_Layer->m_QuantizedLstmParameters.m_CellBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));
        m_Layer->m_QuantizedLstmParameters.m_OutputGateBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::QuantizedLstmLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::FullyConnectedLayer>
{
    DummyLayer()
    {
        armnn::FullyConnectedLayer::DescriptorType desc;
        m_Layer = dummyGraph.AddLayer<armnn::FullyConnectedLayer>(desc, "");
        m_Layer->m_Weight = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::FullyConnectedLayer* m_Layer;
};

// Tag for giving each LayerType entry its own unique strong type.
template<armnn::LayerType>
struct Tag{};

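// For illustration: Tag<armnn::LayerType::Addition> and Tag<armnn::LayerType::Softmax>
// are distinct empty types, so passing Tag<Type>() lets overload resolution select a
// per-layer-type function at compile time with no runtime cost.
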
#define DECLARE_LAYER_POLICY_CUSTOM_PARAM(name, descType) \
template<armnn::DataType DataType> \
struct LayerTypePolicy<armnn::LayerType::name, DataType> \
{ \
    using Type = armnn::name##Layer; \
    using Desc = descType; \
    using QueueDesc = armnn::name##QueueDescriptor; \
    constexpr static const char* NameStr = #name; \
    \
    static std::unique_ptr<armnn::IWorkload> MakeDummyWorkload(armnn::IWorkloadFactory *factory, \
                                                               unsigned int nIn, unsigned int nOut) \
    { \
        QueueDesc desc; \
        armnn::WorkloadInfo info = MakeDummyWorkloadInfo<DataType>(nIn, nOut); \
        return factory->Create##name(desc, info); \
    } \
};

// Define a layer policy specialization for use with the IsLayerSupported tests.
// Use this version for layers whose constructor takes 1 parameter (name).
#define DECLARE_LAYER_POLICY_1_PARAM(name) DECLARE_LAYER_POLICY_CUSTOM_PARAM(name, void)

// Define a layer policy specialization for use with the IsLayerSupported tests.
// Use this version for layers whose constructor takes 2 parameters (descriptor and name).
#define DECLARE_LAYER_POLICY_2_PARAM(name) DECLARE_LAYER_POLICY_CUSTOM_PARAM(name, armnn::name##Descriptor)

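// For illustration, DECLARE_LAYER_POLICY_2_PARAM(Activation) expands to roughly the
// following specialization (a sketch for readability; the macro above is authoritative):
//
//     template<armnn::DataType DataType>
//     struct LayerTypePolicy<armnn::LayerType::Activation, DataType>
//     {
//         using Type = armnn::ActivationLayer;
//         using Desc = armnn::ActivationDescriptor;
//         using QueueDesc = armnn::ActivationQueueDescriptor;
//         constexpr static const char* NameStr = "Activation";
//
//         static std::unique_ptr<armnn::IWorkload> MakeDummyWorkload(armnn::IWorkloadFactory *factory,
//                                                                    unsigned int nIn, unsigned int nOut)
//         {
//             armnn::ActivationQueueDescriptor desc;
//             armnn::WorkloadInfo info = MakeDummyWorkloadInfo<DataType>(nIn, nOut);
//             return factory->CreateActivation(desc, info);
//         }
//     };
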
// Layer policy template.
template<armnn::LayerType Type, armnn::DataType DataType>
struct LayerTypePolicy;

// Every entry in the armnn::LayerType enum must be accounted for below.
DECLARE_LAYER_POLICY_1_PARAM(Abs)

DECLARE_LAYER_POLICY_2_PARAM(Activation)

DECLARE_LAYER_POLICY_1_PARAM(Addition)

DECLARE_LAYER_POLICY_2_PARAM(ArgMinMax)

DECLARE_LAYER_POLICY_2_PARAM(BatchNormalization)

DECLARE_LAYER_POLICY_2_PARAM(BatchToSpaceNd)

DECLARE_LAYER_POLICY_2_PARAM(Concat)

DECLARE_LAYER_POLICY_1_PARAM(Constant)

DECLARE_LAYER_POLICY_1_PARAM(ConvertFp16ToFp32)

DECLARE_LAYER_POLICY_1_PARAM(ConvertFp32ToFp16)

DECLARE_LAYER_POLICY_2_PARAM(Convolution2d)

DECLARE_LAYER_POLICY_1_PARAM(MemCopy)

DECLARE_LAYER_POLICY_1_PARAM(MemImport)

DECLARE_LAYER_POLICY_1_PARAM(Debug)

DECLARE_LAYER_POLICY_2_PARAM(DepthToSpace)

DECLARE_LAYER_POLICY_2_PARAM(DepthwiseConvolution2d)

DECLARE_LAYER_POLICY_1_PARAM(Dequantize)

DECLARE_LAYER_POLICY_2_PARAM(DetectionPostProcess)

DECLARE_LAYER_POLICY_1_PARAM(Equal)

DECLARE_LAYER_POLICY_2_PARAM(FakeQuantization)

DECLARE_LAYER_POLICY_1_PARAM(Floor)

DECLARE_LAYER_POLICY_2_PARAM(FullyConnected)

DECLARE_LAYER_POLICY_1_PARAM(Gather)

DECLARE_LAYER_POLICY_1_PARAM(Greater)

DECLARE_LAYER_POLICY_CUSTOM_PARAM(Input, armnn::LayerBindingId)

DECLARE_LAYER_POLICY_2_PARAM(L2Normalization)

DECLARE_LAYER_POLICY_2_PARAM(Lstm)

DECLARE_LAYER_POLICY_1_PARAM(Maximum)

DECLARE_LAYER_POLICY_2_PARAM(Mean)

DECLARE_LAYER_POLICY_1_PARAM(Merge)

DECLARE_LAYER_POLICY_1_PARAM(Minimum)

DECLARE_LAYER_POLICY_1_PARAM(Multiplication)

DECLARE_LAYER_POLICY_2_PARAM(Normalization)

DECLARE_LAYER_POLICY_CUSTOM_PARAM(Output, armnn::LayerBindingId)

DECLARE_LAYER_POLICY_2_PARAM(Pad)

DECLARE_LAYER_POLICY_1_PARAM(Quantize)

DECLARE_LAYER_POLICY_2_PARAM(Permute)

DECLARE_LAYER_POLICY_2_PARAM(Pooling2d)

DECLARE_LAYER_POLICY_2_PARAM(PreCompiled)

DECLARE_LAYER_POLICY_1_PARAM(Prelu)

DECLARE_LAYER_POLICY_1_PARAM(QuantizedLstm)

DECLARE_LAYER_POLICY_1_PARAM(Division)

DECLARE_LAYER_POLICY_2_PARAM(Resize)

DECLARE_LAYER_POLICY_2_PARAM(Reshape)

DECLARE_LAYER_POLICY_1_PARAM(Rsqrt)

DECLARE_LAYER_POLICY_2_PARAM(Slice)

DECLARE_LAYER_POLICY_2_PARAM(Softmax)

DECLARE_LAYER_POLICY_2_PARAM(SpaceToBatchNd)

DECLARE_LAYER_POLICY_2_PARAM(SpaceToDepth)

DECLARE_LAYER_POLICY_2_PARAM(Splitter)

DECLARE_LAYER_POLICY_2_PARAM(Stack)

DECLARE_LAYER_POLICY_2_PARAM(StridedSlice)

DECLARE_LAYER_POLICY_1_PARAM(Subtraction)

DECLARE_LAYER_POLICY_1_PARAM(Switch)

DECLARE_LAYER_POLICY_2_PARAM(TransposeConvolution2d)

// Generic implementation to get the number of input slots for a given layer type.
template<armnn::LayerType Type>
unsigned int GetNumInputs(const armnn::Layer& layer)
{
    return layer.GetNumInputSlots();
}

// Generic implementation to get the number of output slots for a given layer type.
template<armnn::LayerType Type>
unsigned int GetNumOutputs(const armnn::Layer& layer)
{
    return layer.GetNumOutputSlots();
}

template<>
unsigned int GetNumInputs<armnn::LayerType::Concat>(const armnn::Layer& layer)
{
    boost::ignore_unused(layer);
    return 2;
}

// Tests that the IsLayerSupported() function returns the correct value.
// We determined the correct value by *trying* to create the relevant workload and seeing if it matches what we expect.
// Returns true if expectations are met, otherwise returns false.
template<typename FactoryType, armnn::DataType DataType, armnn::LayerType Type>
bool IsLayerSupportedTest(FactoryType *factory, Tag<Type>)
{
    using LayerPolicy = LayerTypePolicy<Type, DataType>;
    using LayerType = typename LayerPolicy::Type;
    using LayerDesc = typename LayerPolicy::Desc;
    DummyLayer<LayerType, LayerDesc> layer;

    unsigned int numIn = GetNumInputs<Type>(*layer.m_Layer);
    unsigned int numOut = GetNumOutputs<Type>(*layer.m_Layer);

    // Make another dummy layer just so that IsLayerSupported has valid inputs.
    DummyLayer<armnn::ConstantLayer, void> previousLayer;
    // Set the output of the previous layer to a dummy tensor.
    armnn::TensorInfo output = MakeDummyTensorInfo<DataType>();
    previousLayer.m_Layer->GetOutputSlot(0).SetTensorInfo(output);
    // Connect all outputs of the previous layer to the inputs of the tested layer.
    for (unsigned int i = 0; i < numIn; i++)
    {
        armnn::IOutputSlot& previousLayerOutputSlot = previousLayer.m_Layer->GetOutputSlot(0);
        armnn::IInputSlot& layerInputSlot = layer.m_Layer->GetInputSlot(i);
        previousLayerOutputSlot.Connect(layerInputSlot);
    }
    // Set all outputs of the tested layer to a dummy tensor.
    for (unsigned int i = 0; i < numOut; i++)
    {
        layer.m_Layer->GetOutputSlot(i).SetTensorInfo(output);
    }

    std::string layerName = LayerPolicy::NameStr;
    std::string reasonIfUnsupported;
    if (FactoryType::IsLayerSupported(*layer.m_Layer, DataType, reasonIfUnsupported))
    {
        std::string errorMsg = " layer expected support but found none.";
        try
        {
            bool retVal = LayerPolicy::MakeDummyWorkload(factory, numIn, numOut).get() != nullptr;
            BOOST_CHECK_MESSAGE(retVal, layerName << errorMsg);
            return retVal;
        }
        catch(const armnn::InvalidArgumentException& e)
        {
            boost::ignore_unused(e);
            // This is ok since we throw InvalidArgumentException when creating the dummy workload.
            return true;
        }
        catch(const std::exception& e)
        {
            errorMsg = e.what();
            BOOST_TEST_ERROR(layerName << ": " << errorMsg);
            return false;
        }
        catch(...)
        {
            errorMsg = "Unexpected error while testing support for ";
            BOOST_TEST_ERROR(errorMsg << layerName);
            return false;
        }
    }
    else
    {
        std::string errorMsg = " layer expected no support (giving reason: " + reasonIfUnsupported + ") but found some.";
        try
        {
            bool retVal = LayerPolicy::MakeDummyWorkload(factory, numIn, numOut).get() == nullptr;
            BOOST_CHECK_MESSAGE(retVal, layerName << errorMsg);
            return retVal;
        }
        // These two exceptions are ok: For workloads that are partially supported, attempting to instantiate them
        // using parameters that make IsLayerSupported() return false should throw an
        // InvalidArgumentException or UnimplementedException.
        catch(const armnn::InvalidArgumentException& e)
        {
            boost::ignore_unused(e);
            return true;
        }
        catch(const armnn::UnimplementedException& e)
        {
            boost::ignore_unused(e);
            return true;
        }
        catch(const std::exception& e)
        {
            errorMsg = e.what();
            BOOST_TEST_ERROR(layerName << ": " << errorMsg);
            return false;
        }
        catch(...)
        {
            errorMsg = "Unexpected error while testing support for ";
            BOOST_TEST_ERROR(errorMsg << layerName);
            return false;
        }
    }
}

// Helper function to compute the next type in the LayerType enum.
constexpr armnn::LayerType NextType(armnn::LayerType type)
{
    return static_cast<armnn::LayerType>(static_cast<int>(type) + 1);
}

// Termination function for determining the end of the LayerType enumeration.
template<typename FactoryType, armnn::DataType DataType, armnn::LayerType Type>
bool IsLayerSupportedTestsImpl(FactoryType *factory, Tag<armnn::LayerType::LastLayer>)
{
    return IsLayerSupportedTest<FactoryType, DataType, Type>(factory, Tag<Type>());
}

// Recursive function to test an entry in the LayerType enum and then iterate on to the next entry.
template<typename FactoryType, armnn::DataType DataType, armnn::LayerType Type>
bool IsLayerSupportedTestsImpl(FactoryType *factory, Tag<Type>)
{
    bool v = IsLayerSupportedTest<FactoryType, DataType, Type>(factory, Tag<Type>());

    return v &&
        IsLayerSupportedTestsImpl<FactoryType, DataType, NextType(Type)>
            (factory, Tag<NextType(Type)>());
}

// Helper function to pass through to the test framework.
template<typename FactoryType, armnn::DataType DataType>
bool IsLayerSupportedTests(FactoryType *factory)
{
    return IsLayerSupportedTestsImpl<FactoryType, DataType>(factory, Tag<armnn::LayerType::FirstLayer>());
}

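// For illustration: a backend's test suite would typically drive the recursion above from
// a test case like the sketch below (RefWorkloadFactory is an assumption here, not part of
// this file):
//
//     BOOST_AUTO_TEST_CASE(IsLayerSupportedFloat32Reference)
//     {
//         armnn::RefWorkloadFactory factory;
//         IsLayerSupportedTests<armnn::RefWorkloadFactory, armnn::DataType::Float32>(&factory);
//     }
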
template<armnn::LayerType Type>
bool TestLayerTypeMatches()
{
    using LayerPolicy = LayerTypePolicy<Type, armnn::DataType::Float32>;
    using LayerType = typename LayerPolicy::Type;
    using LayerDesc = typename LayerPolicy::Desc;
    DummyLayer<LayerType, LayerDesc> layer;

    std::stringstream ss;
    ss << LayerPolicy::NameStr << " layer type does not match the expected layer type value.";
    bool v = Type == layer.m_Layer->GetType();
    BOOST_CHECK_MESSAGE(v, ss.str());
    return v;
}

template<armnn::LayerType Type>
bool LayerTypeMatchesTestImpl(Tag<armnn::LayerType::LastLayer>)
{
    return TestLayerTypeMatches<Type>();
}

template<armnn::LayerType Type>
bool LayerTypeMatchesTestImpl(Tag<Type>)
{
    return TestLayerTypeMatches<Type>() &&
        LayerTypeMatchesTestImpl<NextType(Type)>(Tag<NextType(Type)>());
}

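// For illustration: the recursion above is kicked off from the first enum entry, mirroring
// IsLayerSupportedTests (a sketch; the wrapper name is illustrative):
//
//     bool LayerTypeMatchesTest()
//     {
//         return LayerTypeMatchesTestImpl<armnn::LayerType::FirstLayer>(
//             Tag<armnn::LayerType::FirstLayer>());
//     }
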
template<typename FactoryType, typename LayerType, armnn::DataType InputDataType, armnn::DataType OutputDataType>
bool IsConvertLayerSupportedTests(std::string& reasonIfUnsupported)
{
    armnn::Graph graph;
    LayerType* const layer = graph.AddLayer<LayerType>("LayerName");

    armnn::Layer* const input = graph.AddLayer<armnn::InputLayer>(0, "input");
    armnn::Layer* const output = graph.AddLayer<armnn::OutputLayer>(0, "output");

    armnn::TensorInfo inputTensorInfo({1, 3, 2, 3}, InputDataType);
    armnn::TensorInfo outputTensorInfo({1, 3, 2, 3}, OutputDataType);

    input->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
    input->GetOutputHandler(0).SetTensorInfo(inputTensorInfo);
    layer->GetOutputSlot(0).Connect(output->GetInputSlot(0));
    layer->GetOutputHandler(0).SetTensorInfo(outputTensorInfo);

    bool result = FactoryType::IsLayerSupported(*layer, InputDataType, reasonIfUnsupported);

    return result;
}

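// For illustration: a typical instantiation checks one of the conversion layers against a
// concrete backend factory (a sketch; RefWorkloadFactory is an assumption, not part of this
// file):
//
//     std::string reason;
//     bool supported = IsConvertLayerSupportedTests<armnn::RefWorkloadFactory,
//                                                   armnn::ConvertFp16ToFp32Layer,
//                                                   armnn::DataType::Float16,
//                                                   armnn::DataType::Float32>(reason);
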
template<typename FactoryType, armnn::DataType InputDataType, armnn::DataType OutputDataType>
bool IsMeanLayerSupportedTests(std::string& reasonIfUnsupported)
{
    armnn::Graph graph;
    static const std::vector<unsigned> axes = {1, 0};
    armnn::MeanDescriptor desc(axes, false);

    armnn::Layer* const layer = graph.AddLayer<armnn::MeanLayer>(desc, "LayerName");

    armnn::Layer* const input = graph.AddLayer<armnn::InputLayer>(0, "input");
    armnn::Layer* const output = graph.AddLayer<armnn::OutputLayer>(0, "output");

    armnn::TensorInfo inputTensorInfo({4, 3, 2}, InputDataType);
    armnn::TensorInfo outputTensorInfo({2}, OutputDataType);

    input->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
    input->GetOutputHandler(0).SetTensorInfo(inputTensorInfo);
    layer->GetOutputSlot(0).Connect(output->GetInputSlot(0));
    layer->GetOutputHandler(0).SetTensorInfo(outputTensorInfo);

    bool result = FactoryType::IsLayerSupported(*layer, InputDataType, reasonIfUnsupported);

    return result;
}

// Tests that IsMeanSupported fails when input tensor dimensions
// do not match output tensor dimensions when keepDims == true.
template<typename FactoryType, armnn::DataType InputDataType, armnn::DataType OutputDataType>
bool IsMeanLayerNotSupportedTests(std::string& reasonIfUnsupported)
{
    armnn::Graph graph;
    static const std::vector<unsigned> axes = {};
    // Set keepDims == true.
    armnn::MeanDescriptor desc(axes, true);

    armnn::Layer* const layer = graph.AddLayer<armnn::MeanLayer>(desc, "LayerName");

    armnn::Layer* const input = graph.AddLayer<armnn::InputLayer>(0, "input");
    armnn::Layer* const output = graph.AddLayer<armnn::OutputLayer>(0, "output");

    // Mismatching number of tensor dimensions.
    armnn::TensorInfo inputTensorInfo({1, 1, 1, 1}, InputDataType);
    armnn::TensorInfo outputTensorInfo({1, 1}, OutputDataType);

    input->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
    input->GetOutputHandler(0).SetTensorInfo(inputTensorInfo);
    layer->GetOutputSlot(0).Connect(output->GetInputSlot(0));
    layer->GetOutputHandler(0).SetTensorInfo(outputTensorInfo);

    bool result = FactoryType::IsLayerSupported(*layer, InputDataType, reasonIfUnsupported);

    return result;
}

} //namespace