//
// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//
#pragma once

#include <Graph.hpp>

#include <backendsCommon/WorkloadFactory.hpp>

#include <armnn/utility/IgnoreUnused.hpp>

namespace
{
armnn::Graph dummyGraph;

// Make a dummy TensorInfo object.
template<armnn::DataType DataType>
armnn::TensorInfo MakeDummyTensorInfo()
{
    return armnn::TensorInfo({2,2,2,2}, DataType, 1.0, 0);
}

// Make a dummy WorkloadInfo using a dummy TensorInfo.
template<armnn::DataType DataType>
armnn::WorkloadInfo MakeDummyWorkloadInfo(unsigned int numInputs, unsigned int numOutputs)
{
    armnn::WorkloadInfo info;

    for (unsigned int i = 0; i < numInputs; i++)
    {
        info.m_InputTensorInfos.push_back(MakeDummyTensorInfo<DataType>());
    }

    for (unsigned int o = 0; o < numOutputs; o++)
    {
        info.m_OutputTensorInfos.push_back(MakeDummyTensorInfo<DataType>());
    }

    return info;
}
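
// A minimal usage sketch (hypothetical counts; the tests below derive the real
// counts from each layer's input and output slots):
//
//     armnn::WorkloadInfo info = MakeDummyWorkloadInfo<armnn::DataType::Float32>(2, 1);
//     // info.m_InputTensorInfos.size() == 2 and info.m_OutputTensorInfos.size() == 1,
//     // with every entry a {2,2,2,2} tensor of scale 1.0 and offset 0.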

// Template class to create a dummy layer (2 parameters).
template<typename LayerType, typename DescType = typename LayerType::DescriptorType>
struct DummyLayer
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<LayerType>(DescType(), "");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    LayerType* m_Layer;
};

// Template class to create a dummy layer (1 parameter).
template<typename LayerType>
struct DummyLayer<LayerType, void>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<LayerType>("");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    LayerType* m_Layer;
};

template<>
struct DummyLayer<armnn::BatchNormalizationLayer>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::BatchNormalizationLayer>(armnn::BatchNormalizationDescriptor(), "");
        m_Layer->m_Mean = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_Variance = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_Beta = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_Gamma = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::BatchNormalizationLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::BatchToSpaceNdLayer>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::BatchToSpaceNdLayer>(armnn::BatchToSpaceNdDescriptor(), "");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::BatchToSpaceNdLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::ConstantLayer, void>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::ConstantLayer>("");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::ConstantLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::InputLayer, armnn::LayerBindingId>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::InputLayer>(armnn::LayerBindingId(), "");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::InputLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::ConcatLayer>
{
    DummyLayer()
    {
        armnn::OriginsDescriptor desc(2);
        m_Layer = dummyGraph.AddLayer<armnn::ConcatLayer>(desc, "");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::ConcatLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::OutputLayer, armnn::LayerBindingId>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::OutputLayer>(armnn::LayerBindingId(), "");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::OutputLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::SplitterLayer>
{
    DummyLayer()
    {
        armnn::ViewsDescriptor desc(1);
        m_Layer = dummyGraph.AddLayer<armnn::SplitterLayer>(desc, "");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::SplitterLayer* m_Layer;
};

template <typename ConvolutionLayerType>
struct DummyConvolutionLayer
{
    DummyConvolutionLayer()
    {
        typename ConvolutionLayerType::DescriptorType desc;
        desc.m_StrideX = 1;
        desc.m_StrideY = 1;
        m_Layer = dummyGraph.AddLayer<ConvolutionLayerType>(desc, "");
        m_Layer->m_Weight = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_Bias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
    }

    ~DummyConvolutionLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    ConvolutionLayerType* m_Layer;
};

template<>
struct DummyLayer<armnn::Convolution2dLayer>
    : public DummyConvolutionLayer<armnn::Convolution2dLayer>
{
};

template<>
struct DummyLayer<armnn::DepthwiseConvolution2dLayer>
    : public DummyConvolutionLayer<armnn::DepthwiseConvolution2dLayer>
{
};

template<>
struct DummyLayer<armnn::TransposeConvolution2dLayer>
    : public DummyConvolutionLayer<armnn::TransposeConvolution2dLayer>
{
};

template<>
struct DummyLayer<armnn::DetectionPostProcessLayer>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::DetectionPostProcessLayer>(armnn::DetectionPostProcessDescriptor(), "");
        m_Layer->m_Anchors = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::DetectionPostProcessLayer* m_Layer;
};

template <typename LstmLayerType>
struct DummyLstmLayer
{
    DummyLstmLayer()
    {
        typename LstmLayerType::DescriptorType desc;
        desc.m_CifgEnabled = false;

        // Pass the descriptor configured above (CIFG disabled), rather than a
        // default-constructed one, so it is consistent with the CIFG parameters set below.
        m_Layer = dummyGraph.AddLayer<LstmLayerType>(desc, "");
        m_Layer->m_BasicParameters.m_InputToForgetWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_InputToCellWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_InputToOutputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_RecurrentToForgetWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_RecurrentToCellWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_RecurrentToOutputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_ForgetGateBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_CellBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_OutputGateBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));

        m_Layer->m_CifgParameters.m_InputToInputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_CifgParameters.m_RecurrentToInputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_CifgParameters.m_InputGateBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
    }

    ~DummyLstmLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::LstmLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::LstmLayer>
    : public DummyLstmLayer<armnn::LstmLayer>
{
};

template <typename QLstmLayerType>
struct DummyQLstmLayer
{
    DummyQLstmLayer()
    {
        typename QLstmLayerType::DescriptorType desc;
        desc.m_CifgEnabled = false;
        desc.m_PeepholeEnabled = true;
        desc.m_ProjectionEnabled = true;
        desc.m_LayerNormEnabled = true;

        // Pass the descriptor configured above, rather than a default-constructed one,
        // so the optional CIFG/peephole/projection/layer-norm parameters below match it.
        m_Layer = dummyGraph.AddLayer<QLstmLayerType>(desc, "qLstm");

        // Basic params
        m_Layer->m_BasicParameters.m_InputToForgetWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));
        m_Layer->m_BasicParameters.m_InputToCellWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));
        m_Layer->m_BasicParameters.m_InputToOutputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));

        m_Layer->m_BasicParameters.m_RecurrentToForgetWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));
        m_Layer->m_BasicParameters.m_RecurrentToCellWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));
        m_Layer->m_BasicParameters.m_RecurrentToOutputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));

        m_Layer->m_BasicParameters.m_ForgetGateBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));
        m_Layer->m_BasicParameters.m_CellBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));
        m_Layer->m_BasicParameters.m_OutputGateBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));

        // CIFG optional params
        m_Layer->m_CifgParameters.m_InputToInputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));
        m_Layer->m_CifgParameters.m_RecurrentToInputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));
        m_Layer->m_CifgParameters.m_InputGateBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));

        // Projection optional params
        m_Layer->m_ProjectionParameters.m_ProjectionWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));
        m_Layer->m_ProjectionParameters.m_ProjectionBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));

        // Peephole optional params
        m_Layer->m_PeepholeParameters.m_CellToInputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS16));
        m_Layer->m_PeepholeParameters.m_CellToForgetWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS16));
        m_Layer->m_PeepholeParameters.m_CellToOutputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS16));

        // Layer normalization optional params
        m_Layer->m_LayerNormParameters.m_InputLayerNormWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS16));
        m_Layer->m_LayerNormParameters.m_ForgetLayerNormWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS16));
        m_Layer->m_LayerNormParameters.m_CellLayerNormWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS16));
        m_Layer->m_LayerNormParameters.m_OutputLayerNormWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS16));
    }

    ~DummyQLstmLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::QLstmLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::QuantizedLstmLayer, void>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::QuantizedLstmLayer>("");

        m_Layer->m_QuantizedLstmParameters.m_InputToInputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));
        m_Layer->m_QuantizedLstmParameters.m_InputToForgetWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));
        m_Layer->m_QuantizedLstmParameters.m_InputToCellWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));
        m_Layer->m_QuantizedLstmParameters.m_InputToOutputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));

        m_Layer->m_QuantizedLstmParameters.m_RecurrentToInputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));
        m_Layer->m_QuantizedLstmParameters.m_RecurrentToForgetWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));
        m_Layer->m_QuantizedLstmParameters.m_RecurrentToCellWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));
        m_Layer->m_QuantizedLstmParameters.m_RecurrentToOutputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));

        m_Layer->m_QuantizedLstmParameters.m_InputGateBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));
        m_Layer->m_QuantizedLstmParameters.m_ForgetGateBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));
        m_Layer->m_QuantizedLstmParameters.m_CellBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));
        m_Layer->m_QuantizedLstmParameters.m_OutputGateBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::QuantizedLstmLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::FullyConnectedLayer>
{
    DummyLayer()
    {
        armnn::FullyConnectedLayer::DescriptorType desc;
        m_Layer = dummyGraph.AddLayer<armnn::FullyConnectedLayer>(desc, "");
        m_Layer->m_Weight = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::FullyConnectedLayer* m_Layer;
};

// Tag for giving each LayerType entry its own unique strong type.
template<armnn::LayerType>
struct Tag{};
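
// Tag turns an enum value into a distinct type, so the LayerType enum can drive
// overload resolution. For example, Tag<armnn::LayerType::FirstLayer> and
// Tag<armnn::LayerType::LastLayer> are unrelated types, which is what lets the
// recursive IsLayerSupportedTestsImpl() overloads further below terminate at LastLayer.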

#define DECLARE_LAYER_POLICY_CUSTOM_PARAM(name, descType) \
template<armnn::DataType DataType> \
struct LayerTypePolicy<armnn::LayerType::name, DataType> \
{ \
    using Type = armnn::name##Layer; \
    using Desc = descType; \
    using QueueDesc = armnn::name##QueueDescriptor; \
    constexpr static const char* NameStr = #name; \
    constexpr static const bool IsException = false; \
    \
    static std::unique_ptr<armnn::IWorkload> MakeDummyWorkload(armnn::IWorkloadFactory *factory, \
        unsigned int nIn, unsigned int nOut) \
    { \
        QueueDesc desc; \
        armnn::WorkloadInfo info = MakeDummyWorkloadInfo<DataType>(nIn, nOut); \
        return factory->Create##name(desc, info); \
    } \
};
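
// For reference, a sketch of what DECLARE_LAYER_POLICY_2_PARAM(Activation)
// (declared further below) expands to via this macro:
//
//     template<armnn::DataType DataType>
//     struct LayerTypePolicy<armnn::LayerType::Activation, DataType>
//     {
//         using Type = armnn::ActivationLayer;
//         using Desc = armnn::ActivationDescriptor;
//         using QueueDesc = armnn::ActivationQueueDescriptor;
//         // ... MakeDummyWorkload() calls factory->CreateActivation(desc, info).
//     };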

// Define a layer policy specialization for use with the IsLayerSupported tests.
// Use this version for layers whose constructor takes 1 parameter (name).
#define DECLARE_LAYER_POLICY_1_PARAM(name) DECLARE_LAYER_POLICY_CUSTOM_PARAM(name, void)

// Define a layer policy specialization for use with the IsLayerSupported tests.
// Use this version for layers whose constructor takes 2 parameters (descriptor and name).
#define DECLARE_LAYER_POLICY_2_PARAM(name) DECLARE_LAYER_POLICY_CUSTOM_PARAM(name, armnn::name##Descriptor)

#define DECLARE_LAYER_POLICY_EXCEPTION(name, descType) \
template<armnn::DataType DataType> \
struct LayerTypePolicy<armnn::LayerType::name, DataType> \
{ \
    using Type = armnn::name##Layer; \
    using Desc = descType; \
    constexpr static const char* NameStr = #name; \
    constexpr static const bool IsException = true; \
    \
    static std::unique_ptr<armnn::IWorkload> MakeDummyWorkload(armnn::IWorkloadFactory *factory, \
        unsigned int nIn, unsigned int nOut) \
    { \
        IgnoreUnused(factory, nIn, nOut); \
        return std::unique_ptr<armnn::IWorkload>(); \
    } \
};

#define DECLARE_LAYER_POLICY_EXCEPTION_1_PARAM(name) DECLARE_LAYER_POLICY_EXCEPTION(name, void)
#define DECLARE_LAYER_POLICY_EXCEPTION_2_PARAM(name) DECLARE_LAYER_POLICY_EXCEPTION(name, armnn::name##Descriptor)
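
// Exception policies mark layer types that deliberately have no workload to create:
// IsException is true and MakeDummyWorkload() returns an empty pointer, so
// IsLayerSupportedTest() below skips them. StandIn, declared via
// DECLARE_LAYER_POLICY_EXCEPTION_2_PARAM further down, is the one current user.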

// Layer policy template.
template<armnn::LayerType Type, armnn::DataType DataType>
struct LayerTypePolicy;

// Every entry in the armnn::LayerType enum must be accounted for below.
DECLARE_LAYER_POLICY_2_PARAM(Activation)

DECLARE_LAYER_POLICY_1_PARAM(Addition)

DECLARE_LAYER_POLICY_2_PARAM(ArgMinMax)

DECLARE_LAYER_POLICY_2_PARAM(BatchNormalization)

DECLARE_LAYER_POLICY_2_PARAM(BatchToSpaceNd)

DECLARE_LAYER_POLICY_2_PARAM(Comparison)

DECLARE_LAYER_POLICY_2_PARAM(Concat)

DECLARE_LAYER_POLICY_1_PARAM(Constant)

DECLARE_LAYER_POLICY_1_PARAM(ConvertBf16ToFp32)

DECLARE_LAYER_POLICY_1_PARAM(ConvertFp16ToFp32)

DECLARE_LAYER_POLICY_1_PARAM(ConvertFp32ToBf16)

DECLARE_LAYER_POLICY_1_PARAM(ConvertFp32ToFp16)

DECLARE_LAYER_POLICY_2_PARAM(Convolution2d)

DECLARE_LAYER_POLICY_1_PARAM(MemCopy)

DECLARE_LAYER_POLICY_1_PARAM(MemImport)

DECLARE_LAYER_POLICY_1_PARAM(Debug)

DECLARE_LAYER_POLICY_2_PARAM(DepthToSpace)

DECLARE_LAYER_POLICY_2_PARAM(DepthwiseConvolution2d)

DECLARE_LAYER_POLICY_1_PARAM(Dequantize)

DECLARE_LAYER_POLICY_2_PARAM(DetectionPostProcess)

DECLARE_LAYER_POLICY_2_PARAM(ElementwiseUnary)

DECLARE_LAYER_POLICY_2_PARAM(FakeQuantization)

DECLARE_LAYER_POLICY_2_PARAM(Fill)

DECLARE_LAYER_POLICY_1_PARAM(Floor)

DECLARE_LAYER_POLICY_2_PARAM(FullyConnected)

DECLARE_LAYER_POLICY_2_PARAM(Gather)

DECLARE_LAYER_POLICY_CUSTOM_PARAM(Input, armnn::LayerBindingId)

DECLARE_LAYER_POLICY_2_PARAM(InstanceNormalization)

DECLARE_LAYER_POLICY_2_PARAM(L2Normalization)

DECLARE_LAYER_POLICY_2_PARAM(LogSoftmax)

DECLARE_LAYER_POLICY_2_PARAM(Lstm)

DECLARE_LAYER_POLICY_1_PARAM(Maximum)

DECLARE_LAYER_POLICY_2_PARAM(Mean)

DECLARE_LAYER_POLICY_1_PARAM(Merge)

DECLARE_LAYER_POLICY_1_PARAM(Minimum)

DECLARE_LAYER_POLICY_1_PARAM(Multiplication)

DECLARE_LAYER_POLICY_2_PARAM(Normalization)

DECLARE_LAYER_POLICY_CUSTOM_PARAM(Output, armnn::LayerBindingId)

DECLARE_LAYER_POLICY_2_PARAM(Pad)

DECLARE_LAYER_POLICY_1_PARAM(Quantize)

DECLARE_LAYER_POLICY_2_PARAM(Permute)

DECLARE_LAYER_POLICY_2_PARAM(Pooling2d)

DECLARE_LAYER_POLICY_2_PARAM(PreCompiled)

DECLARE_LAYER_POLICY_1_PARAM(Prelu)

DECLARE_LAYER_POLICY_2_PARAM(QLstm)

DECLARE_LAYER_POLICY_1_PARAM(QuantizedLstm)

DECLARE_LAYER_POLICY_1_PARAM(Division)

DECLARE_LAYER_POLICY_1_PARAM(Rank)

DECLARE_LAYER_POLICY_2_PARAM(Resize)

DECLARE_LAYER_POLICY_2_PARAM(Reshape)

DECLARE_LAYER_POLICY_2_PARAM(Slice)

DECLARE_LAYER_POLICY_2_PARAM(Softmax)

DECLARE_LAYER_POLICY_2_PARAM(SpaceToBatchNd)

DECLARE_LAYER_POLICY_2_PARAM(SpaceToDepth)

DECLARE_LAYER_POLICY_2_PARAM(Splitter)

DECLARE_LAYER_POLICY_2_PARAM(Stack)

DECLARE_LAYER_POLICY_EXCEPTION_2_PARAM(StandIn)

DECLARE_LAYER_POLICY_2_PARAM(StridedSlice)

DECLARE_LAYER_POLICY_1_PARAM(Subtraction)

DECLARE_LAYER_POLICY_1_PARAM(Switch)

DECLARE_LAYER_POLICY_2_PARAM(Transpose)

DECLARE_LAYER_POLICY_2_PARAM(TransposeConvolution2d)

// Generic implementation to get the number of input slots for a given layer type.
template<armnn::LayerType Type>
unsigned int GetNumInputs(const armnn::Layer& layer)
{
    return layer.GetNumInputSlots();
}

// Generic implementation to get the number of output slots for a given layer type.
template<armnn::LayerType Type>
unsigned int GetNumOutputs(const armnn::Layer& layer)
{
    return layer.GetNumOutputSlots();
}

template<>
unsigned int GetNumInputs<armnn::LayerType::Concat>(const armnn::Layer& layer)
{
    IgnoreUnused(layer);
    return 2;
}

// Tests that the IsLayerSupported() function returns the correct value.
// The correct value is determined by *trying* to create the relevant workload and
// checking whether the outcome matches what IsLayerSupported() reported.
// Returns true if expectations are met, otherwise returns false.
template<typename FactoryType, armnn::DataType DataType, armnn::LayerType Type>
bool IsLayerSupportedTest(FactoryType *factory, Tag<Type>)
{
    using LayerPolicy = LayerTypePolicy<Type, DataType>;
    using LayerType = typename LayerPolicy::Type;
    using LayerDesc = typename LayerPolicy::Desc;
    DummyLayer<LayerType, LayerDesc> layer;

    if (LayerPolicy::IsException) // Don't test exceptions to the rule.
    {
        return true;
    }

    unsigned int numIn = GetNumInputs<Type>(*layer.m_Layer);
    unsigned int numOut = GetNumOutputs<Type>(*layer.m_Layer);

    // Make another dummy layer just to make IsLayerSupported have valid inputs.
    DummyLayer<armnn::ConstantLayer, void> previousLayer;
    // Set output of the previous layer to a dummy tensor.
    armnn::TensorInfo output = MakeDummyTensorInfo<DataType>();
    previousLayer.m_Layer->GetOutputSlot(0).SetTensorInfo(output);
    // Connect all outputs of the previous layer to inputs of tested layer.
    for (unsigned int i = 0; i < numIn; i++)
    {
        armnn::IOutputSlot& previousLayerOutputSlot = previousLayer.m_Layer->GetOutputSlot(0);
        armnn::IInputSlot& layerInputSlot = layer.m_Layer->GetInputSlot(i);
        previousLayerOutputSlot.Connect(layerInputSlot);
    }
    // Set outputs of tested layer to a dummy tensor.
    for (unsigned int i = 0; i < numOut; i++)
    {
        layer.m_Layer->GetOutputSlot(i).SetTensorInfo(output);
    }

    std::string layerName = LayerPolicy::NameStr;
    std::string reasonIfUnsupported;
    if (FactoryType::IsLayerSupported(*layer.m_Layer, DataType, reasonIfUnsupported))
    {
        std::string errorMsg = " layer expected support but found none.";
        try
        {
            bool retVal = LayerPolicy::MakeDummyWorkload(factory, numIn, numOut).get() != nullptr;
            BOOST_CHECK_MESSAGE(retVal, layerName << errorMsg);
            return retVal;
        }
        catch(const armnn::InvalidArgumentException& e)
        {
            IgnoreUnused(e);
            // This is ok since we throw InvalidArgumentException when creating the dummy workload.
            return true;
        }
        catch(const std::exception& e)
        {
            errorMsg = e.what();
            BOOST_TEST_ERROR(layerName << ": " << errorMsg);
            return false;
        }
        catch(...)
        {
            errorMsg = "Unexpected error while testing support for ";
            BOOST_TEST_ERROR(errorMsg << layerName);
            return false;
        }
    }
    else
    {
        std::string errorMsg = " layer expected no support (giving reason: " + reasonIfUnsupported +
            ") but found some.";
        try
        {
            bool retVal = LayerPolicy::MakeDummyWorkload(factory, numIn, numOut).get() == nullptr;
            BOOST_CHECK_MESSAGE(retVal, layerName << errorMsg);
            return retVal;
        }
        // These two exceptions are ok: For workloads that are partially supported, attempting to instantiate them
        // using parameters that make IsLayerSupported() return false should throw an
        // InvalidArgumentException or UnimplementedException.
        catch(const armnn::InvalidArgumentException& e)
        {
            IgnoreUnused(e);
            return true;
        }
        catch(const armnn::UnimplementedException& e)
        {
            IgnoreUnused(e);
            return true;
        }
        catch(const std::exception& e)
        {
            errorMsg = e.what();
            BOOST_TEST_ERROR(layerName << ": " << errorMsg);
            return false;
        }
        catch(...)
        {
            errorMsg = "Unexpected error while testing support for ";
            BOOST_TEST_ERROR(errorMsg << layerName);
            return false;
        }
    }
}

// Helper function to compute the next type in the LayerType enum.
constexpr armnn::LayerType NextType(armnn::LayerType type)
{
    return static_cast<armnn::LayerType>(static_cast<int>(type) + 1);
}
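
// NextType() relies on the LayerType enum values being contiguous integers; e.g.
// NextType(armnn::LayerType::FirstLayer) is the entry immediately after FirstLayer,
// which is what lets the recursion below walk the whole enum one entry at a time.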

// Termination function for determining the end of the LayerType enumeration.
template<typename FactoryType, armnn::DataType DataType, armnn::LayerType Type>
bool IsLayerSupportedTestsImpl(FactoryType *factory, Tag<armnn::LayerType::LastLayer>)
{
    return IsLayerSupportedTest<FactoryType, DataType, Type>(factory, Tag<Type>());
}

// Recursive function to test an entry in the LayerType enum and then iterate to the next entry.
template<typename FactoryType, armnn::DataType DataType, armnn::LayerType Type>
bool IsLayerSupportedTestsImpl(FactoryType *factory, Tag<Type>)
{
    bool v = IsLayerSupportedTest<FactoryType, DataType, Type>(factory, Tag<Type>());

    return v &&
        IsLayerSupportedTestsImpl<FactoryType, DataType, NextType(Type)>
            (factory, Tag<NextType(Type)>());
}

// Helper function to pass through to the test framework.
template<typename FactoryType, armnn::DataType DataType>
bool IsLayerSupportedTests(FactoryType *factory)
{
    return IsLayerSupportedTestsImpl<FactoryType, DataType>(factory, Tag<armnn::LayerType::FirstLayer>());
}
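
// A typical call from a backend's test suite might look like the following sketch
// (RefWorkloadFactory is illustrative; any factory type exposing a static
// IsLayerSupported() works):
//
//     armnn::RefWorkloadFactory factory;
//     BOOST_TEST((IsLayerSupportedTests<armnn::RefWorkloadFactory,
//                                       armnn::DataType::Float32>(&factory)));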

template<armnn::LayerType Type>
bool TestLayerTypeMatches()
{
    using LayerPolicy = LayerTypePolicy<Type, armnn::DataType::Float32>;
    using LayerType = typename LayerPolicy::Type;
    using LayerDesc = typename LayerPolicy::Desc;
    DummyLayer<LayerType, LayerDesc> layer;

    std::stringstream ss;
    ss << LayerPolicy::NameStr << " layer type mismatches expected layer type value.";
    bool v = Type == layer.m_Layer->GetType();
    BOOST_CHECK_MESSAGE(v, ss.str());
    return v;
}

template<armnn::LayerType Type>
bool LayerTypeMatchesTestImpl(Tag<armnn::LayerType::LastLayer>)
{
    return TestLayerTypeMatches<Type>();
}

template<armnn::LayerType Type>
bool LayerTypeMatchesTestImpl(Tag<Type>)
{
    return TestLayerTypeMatches<Type>() &&
        LayerTypeMatchesTestImpl<NextType(Type)>(Tag<NextType(Type)>());
}
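
// A sketch of the expected entry point, mirroring IsLayerSupportedTests() above:
// start the recursion at FirstLayer and let the LastLayer overload terminate it.
//
//     BOOST_TEST(LayerTypeMatchesTestImpl<armnn::LayerType::FirstLayer>(
//         Tag<armnn::LayerType::FirstLayer>()));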

template<typename FactoryType, typename LayerType, armnn::DataType InputDataType, armnn::DataType OutputDataType>
bool IsConvertLayerSupportedTests(std::string& reasonIfUnsupported)
{
    armnn::Graph graph;
    LayerType* const layer = graph.AddLayer<LayerType>("LayerName");

    armnn::Layer* const input = graph.AddLayer<armnn::InputLayer>(0, "input");
    armnn::Layer* const output = graph.AddLayer<armnn::OutputLayer>(0, "output");

    armnn::TensorInfo inputTensorInfo({1, 3, 2, 3}, InputDataType);
    armnn::TensorInfo outputTensorInfo({1, 3, 2, 3}, OutputDataType);

    input->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
    input->GetOutputHandler(0).SetTensorInfo(inputTensorInfo);
    layer->GetOutputSlot(0).Connect(output->GetInputSlot(0));
    layer->GetOutputHandler(0).SetTensorInfo(outputTensorInfo);

    bool result = FactoryType::IsLayerSupported(*layer, InputDataType, reasonIfUnsupported);

    return result;
}

template<typename FactoryType, armnn::DataType InputDataType, armnn::DataType OutputDataType>
bool IsMeanLayerSupportedTests(std::string& reasonIfUnsupported)
{
    armnn::Graph graph;
    static const std::vector<unsigned> axes = {1, 0};
    armnn::MeanDescriptor desc(axes, false);

    armnn::Layer* const layer = graph.AddLayer<armnn::MeanLayer>(desc, "LayerName");

    armnn::Layer* const input = graph.AddLayer<armnn::InputLayer>(0, "input");
    armnn::Layer* const output = graph.AddLayer<armnn::OutputLayer>(0, "output");

    armnn::TensorInfo inputTensorInfo({4, 3, 2}, InputDataType);
    armnn::TensorInfo outputTensorInfo({2}, OutputDataType);

    input->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
    input->GetOutputHandler(0).SetTensorInfo(inputTensorInfo);
    layer->GetOutputSlot(0).Connect(output->GetInputSlot(0));
    layer->GetOutputHandler(0).SetTensorInfo(outputTensorInfo);

    bool result = FactoryType::IsLayerSupported(*layer, InputDataType, reasonIfUnsupported);

    return result;
}

// Tests that IsMeanSupported fails when input tensor dimensions
// do not match output tensor dimensions when keepDims == true.
template<typename FactoryType, armnn::DataType InputDataType, armnn::DataType OutputDataType>
bool IsMeanLayerNotSupportedTests(std::string& reasonIfUnsupported)
{
    armnn::Graph graph;
    static const std::vector<unsigned> axes = {};
    // Set keepDims == true.
    armnn::MeanDescriptor desc(axes, true);

    armnn::Layer* const layer = graph.AddLayer<armnn::MeanLayer>(desc, "LayerName");

    armnn::Layer* const input = graph.AddLayer<armnn::InputLayer>(0, "input");
    armnn::Layer* const output = graph.AddLayer<armnn::OutputLayer>(0, "output");

    // Mismatching number of tensor dimensions.
    armnn::TensorInfo inputTensorInfo({1, 1, 1, 1}, InputDataType);
    armnn::TensorInfo outputTensorInfo({1, 1}, OutputDataType);

    input->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
    input->GetOutputHandler(0).SetTensorInfo(inputTensorInfo);
    layer->GetOutputSlot(0).Connect(output->GetInputSlot(0));
    layer->GetOutputHandler(0).SetTensorInfo(outputTensorInfo);

    bool result = FactoryType::IsLayerSupported(*layer, InputDataType, reasonIfUnsupported);

    return result;
}

template<typename FactoryType, armnn::DataType OutputDataType>
bool IsConstantLayerSupportedTests(std::string& reasonIfUnsupported)
{
    armnn::Graph graph;

    armnn::Layer* const layer = graph.AddLayer<armnn::ConstantLayer>("ConstantLayerName");
    armnn::Layer* const output = graph.AddLayer<armnn::OutputLayer>(0, "OutputLayerName");

    armnn::TensorInfo outputTensorInfo({1, 1}, OutputDataType);

    layer->GetOutputSlot(0).Connect(output->GetInputSlot(0));
    layer->GetOutputHandler(0).SetTensorInfo(outputTensorInfo);

    bool result = FactoryType::IsLayerSupported(*layer, OutputDataType, reasonIfUnsupported);

    return result;
}

} // anonymous namespace