//
// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//
#pragma once

#include <Graph.hpp>

#include <backendsCommon/MapWorkload.hpp>
#include <backendsCommon/UnmapWorkload.hpp>
#include <backendsCommon/WorkloadFactory.hpp>

#include <armnn/utility/IgnoreUnused.hpp>

namespace
{
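// Shared graph that every DummyLayer adds itself to on construction and erases itself from on destruction.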
armnn::Graph dummyGraph;

// Make a dummy TensorInfo object.
template<armnn::DataType DataType>
armnn::TensorInfo MakeDummyTensorInfo()
{
    return armnn::TensorInfo({2,2,2,2}, DataType, 1.0f, 0);
}

// Make a dummy WorkloadInfo using a dummy TensorInfo.
template<armnn::DataType DataType>
armnn::WorkloadInfo MakeDummyWorkloadInfo(unsigned int numInputs, unsigned int numOutputs)
{
    armnn::WorkloadInfo info;

    for (unsigned int i = 0; i < numInputs; i++)
    {
        info.m_InputTensorInfos.push_back(MakeDummyTensorInfo<DataType>());
    }

    for (unsigned int o = 0; o < numOutputs; o++)
    {
        info.m_OutputTensorInfos.push_back(MakeDummyTensorInfo<DataType>());
    }

    return info;
}

// Template class to create a dummy layer (2 parameters).
template<typename LayerType, typename DescType = typename LayerType::DescriptorType>
struct DummyLayer
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<LayerType>(DescType(), "");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    LayerType* m_Layer;
};

// Template class to create a dummy layer (1 parameter).
template<typename LayerType>
struct DummyLayer<LayerType, void>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<LayerType>("");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    LayerType* m_Layer;
};

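// Specialization for BatchNormalizationLayer, which owns its mean, variance, beta and gamma tensors.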
template<>
struct DummyLayer<armnn::BatchNormalizationLayer>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::BatchNormalizationLayer>(armnn::BatchNormalizationDescriptor(), "");
        m_Layer->m_Mean = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_Variance = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_Beta = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_Gamma = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::BatchNormalizationLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::BatchToSpaceNdLayer>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::BatchToSpaceNdLayer>(armnn::BatchToSpaceNdDescriptor(), "");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::BatchToSpaceNdLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::ConstantLayer, void>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::ConstantLayer>("");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::ConstantLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::InputLayer, armnn::LayerBindingId>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::InputLayer>(armnn::LayerBindingId(), "");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::InputLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::ConcatLayer>
{
    DummyLayer()
    {
        armnn::OriginsDescriptor desc(2);
        m_Layer = dummyGraph.AddLayer<armnn::ConcatLayer>(desc, "");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::ConcatLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::MapLayer, void>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::MapLayer>("");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::MapLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::OutputLayer, armnn::LayerBindingId>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::OutputLayer>(armnn::LayerBindingId(), "");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::OutputLayer* m_Layer;
};

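// Splitter requires a ViewsDescriptor; a single view suffices for the dummy graph.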
template<>
struct DummyLayer<armnn::SplitterLayer>
{
    DummyLayer()
    {
        armnn::ViewsDescriptor desc(1);
        m_Layer = dummyGraph.AddLayer<armnn::SplitterLayer>(desc, "");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::SplitterLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::UnmapLayer, void>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::UnmapLayer>("");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::UnmapLayer* m_Layer;
};

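// Template class to create a dummy convolution layer with valid strides and owned weight/bias tensors.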
template <typename ConvolutionLayerType>
struct DummyConvolutionLayer
{
    DummyConvolutionLayer()
    {
        typename ConvolutionLayerType::DescriptorType desc;
        desc.m_StrideX = 1;
        desc.m_StrideY = 1;
        m_Layer = dummyGraph.AddLayer<ConvolutionLayerType>(desc, "");
        m_Layer->m_Weight = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_Bias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
    }

    ~DummyConvolutionLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    ConvolutionLayerType* m_Layer;
};

template<>
struct DummyLayer<armnn::Convolution2dLayer>
    : public DummyConvolutionLayer<armnn::Convolution2dLayer>
{
};

template<>
struct DummyLayer<armnn::DepthwiseConvolution2dLayer>
    : public DummyConvolutionLayer<armnn::DepthwiseConvolution2dLayer>
{
};

template<>
struct DummyLayer<armnn::TransposeConvolution2dLayer>
    : public DummyConvolutionLayer<armnn::TransposeConvolution2dLayer>
{
};

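// Specialization for DetectionPostProcessLayer, which owns its anchors tensor.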
template<>
struct DummyLayer<armnn::DetectionPostProcessLayer>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::DetectionPostProcessLayer>(armnn::DetectionPostProcessDescriptor(), "");
        m_Layer->m_Anchors = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::DetectionPostProcessLayer* m_Layer;
};

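// Template class to create a dummy LSTM layer with CIFG disabled, owning its basic and CIFG parameter tensors.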
template <typename LstmLayerType>
struct DummyLstmLayer
{
    DummyLstmLayer()
    {
        typename LstmLayerType::DescriptorType desc;
        desc.m_CifgEnabled = false;

        m_Layer = dummyGraph.AddLayer<LstmLayerType>(desc, "");
        m_Layer->m_BasicParameters.m_InputToForgetWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_InputToCellWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_InputToOutputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_RecurrentToForgetWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_RecurrentToCellWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_RecurrentToOutputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_ForgetGateBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_CellBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_OutputGateBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));

        // CIFG is disabled, so the input-gate parameters are required.
        m_Layer->m_CifgParameters.m_InputToInputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_CifgParameters.m_RecurrentToInputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_CifgParameters.m_InputGateBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
    }

    ~DummyLstmLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::LstmLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::LstmLayer>
    : public DummyLstmLayer<armnn::LstmLayer>
{
};

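// Template class to create a dummy QLstm layer with CIFG disabled and peephole, projection and layer
// normalization enabled, owning all the corresponding parameter tensors.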
template <typename QLstmLayerType>
struct DummyQLstmLayer
{
    DummyQLstmLayer()
    {
        typename QLstmLayerType::DescriptorType desc;
        desc.m_CifgEnabled = false;
        desc.m_PeepholeEnabled = true;
        desc.m_ProjectionEnabled = true;
        desc.m_LayerNormEnabled = true;

        m_Layer = dummyGraph.AddLayer<QLstmLayerType>(desc, "qLstm");

        // Basic params
        m_Layer->m_BasicParameters.m_InputToForgetWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));
        m_Layer->m_BasicParameters.m_InputToCellWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));
        m_Layer->m_BasicParameters.m_InputToOutputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));

        m_Layer->m_BasicParameters.m_RecurrentToForgetWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));
        m_Layer->m_BasicParameters.m_RecurrentToCellWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));
        m_Layer->m_BasicParameters.m_RecurrentToOutputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));

        m_Layer->m_BasicParameters.m_ForgetGateBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));
        m_Layer->m_BasicParameters.m_CellBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));
        m_Layer->m_BasicParameters.m_OutputGateBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));

        // CIFG optional params
        m_Layer->m_CifgParameters.m_InputToInputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));
        m_Layer->m_CifgParameters.m_RecurrentToInputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));
        m_Layer->m_CifgParameters.m_InputGateBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));

        // Projection optional params
        m_Layer->m_ProjectionParameters.m_ProjectionWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));
        m_Layer->m_ProjectionParameters.m_ProjectionBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));

        // Peephole optional params
        m_Layer->m_PeepholeParameters.m_CellToInputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS16));
        m_Layer->m_PeepholeParameters.m_CellToForgetWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS16));
        m_Layer->m_PeepholeParameters.m_CellToOutputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS16));

        // Layer normalization optional params
        m_Layer->m_LayerNormParameters.m_InputLayerNormWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS16));
        m_Layer->m_LayerNormParameters.m_ForgetLayerNormWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS16));
        m_Layer->m_LayerNormParameters.m_CellLayerNormWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS16));
        m_Layer->m_LayerNormParameters.m_OutputLayerNormWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS16));
    }

    ~DummyQLstmLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::QLstmLayer* m_Layer;
};

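// Specialization for QuantizedLstmLayer, which owns QAsymmU8 weights and Signed32 biases.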
template<>
struct DummyLayer<armnn::QuantizedLstmLayer, void>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::QuantizedLstmLayer>("");

        m_Layer->m_QuantizedLstmParameters.m_InputToInputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));
        m_Layer->m_QuantizedLstmParameters.m_InputToForgetWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));
        m_Layer->m_QuantizedLstmParameters.m_InputToCellWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));
        m_Layer->m_QuantizedLstmParameters.m_InputToOutputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));

        m_Layer->m_QuantizedLstmParameters.m_RecurrentToInputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));
        m_Layer->m_QuantizedLstmParameters.m_RecurrentToForgetWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));
        m_Layer->m_QuantizedLstmParameters.m_RecurrentToCellWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));
        m_Layer->m_QuantizedLstmParameters.m_RecurrentToOutputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));

        m_Layer->m_QuantizedLstmParameters.m_InputGateBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));
        m_Layer->m_QuantizedLstmParameters.m_ForgetGateBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));
        m_Layer->m_QuantizedLstmParameters.m_CellBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));
        m_Layer->m_QuantizedLstmParameters.m_OutputGateBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::QuantizedLstmLayer* m_Layer;
};

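// Specialization for FullyConnectedLayer, which owns its weight tensor.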
template<>
struct DummyLayer<armnn::FullyConnectedLayer>
{
    DummyLayer()
    {
        armnn::FullyConnectedLayer::DescriptorType desc;
        m_Layer = dummyGraph.AddLayer<armnn::FullyConnectedLayer>(desc, "");
        m_Layer->m_Weight = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::FullyConnectedLayer* m_Layer;
};

// Tag for giving LayerType entries a unique strong type each.
template<armnn::LayerType>
struct Tag{};

#define DECLARE_LAYER_POLICY_CUSTOM_PARAM(name, descType) \
template<armnn::DataType DataType> \
struct LayerTypePolicy<armnn::LayerType::name, DataType> \
{ \
    using Type = armnn::name##Layer; \
    using Desc = descType; \
    using QueueDesc = armnn::name##QueueDescriptor; \
    constexpr static const char* NameStr = #name; \
    constexpr static const bool IsException = false; \
    \
    static std::unique_ptr<armnn::IWorkload> MakeDummyWorkload(armnn::IWorkloadFactory* factory, \
                                                               unsigned int nIn, unsigned int nOut) \
    { \
        QueueDesc desc; \
        armnn::WorkloadInfo info = MakeDummyWorkloadInfo<DataType>(nIn, nOut); \
        return factory->Create##name(desc, info); \
    } \
};

#define DECLARE_LAYER_POLICY_MAP_PARAM(name, descType) \
template<armnn::DataType DataType> \
struct LayerTypePolicy<armnn::LayerType::name, DataType> \
{ \
    using Type = armnn::name##Layer; \
    using Desc = descType; \
    using QueueDesc = armnn::name##QueueDescriptor; \
    using Workload = armnn::name##Workload; \
    constexpr static const char* NameStr = #name; \
    constexpr static const bool IsException = false; \
    \
    static std::unique_ptr<armnn::IWorkload> MakeDummyWorkload(armnn::IWorkloadFactory* factory, \
                                                               unsigned int nIn, unsigned int nOut) \
    { \
        IgnoreUnused(factory); \
        QueueDesc desc; \
        armnn::WorkloadInfo info = MakeDummyWorkloadInfo<DataType>(nIn, nOut); \
        return std::make_unique<armnn::name##Workload>(desc, info); \
    } \
};

// Define a layer policy specialization for use with the IsLayerSupported tests.
// Use this version for layers whose constructor takes one parameter (name).
#define DECLARE_LAYER_POLICY_1_PARAM(name) DECLARE_LAYER_POLICY_CUSTOM_PARAM(name, void)

// Define a layer policy specialization for use with the IsLayerSupported tests.
// Use this version for layers whose constructor takes two parameters (descriptor and name).
#define DECLARE_LAYER_POLICY_2_PARAM(name) DECLARE_LAYER_POLICY_CUSTOM_PARAM(name, armnn::name##Descriptor)

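// For illustration, DECLARE_LAYER_POLICY_2_PARAM(Activation) expands to a LayerTypePolicy
// specialization whose MakeDummyWorkload() builds an armnn::ActivationQueueDescriptor and
// returns factory->CreateActivation(desc, info).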

#define DECLARE_LAYER_POLICY_EXCEPTION(name, descType) \
template<armnn::DataType DataType> \
struct LayerTypePolicy<armnn::LayerType::name, DataType> \
{ \
    using Type = armnn::name##Layer; \
    using Desc = descType; \
    constexpr static const char* NameStr = #name; \
    constexpr static const bool IsException = true; \
    \
    static std::unique_ptr<armnn::IWorkload> MakeDummyWorkload(armnn::IWorkloadFactory* factory, \
                                                               unsigned int nIn, unsigned int nOut) \
    { \
        IgnoreUnused(factory, nIn, nOut); \
        return std::unique_ptr<armnn::IWorkload>(); \
    } \
};

#define DECLARE_LAYER_POLICY_EXCEPTION_1_PARAM(name) DECLARE_LAYER_POLICY_EXCEPTION(name, void)
#define DECLARE_LAYER_POLICY_EXCEPTION_2_PARAM(name) DECLARE_LAYER_POLICY_EXCEPTION(name, armnn::name##Descriptor)

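// Exception policies deliberately create no workload; IsLayerSupportedTest() returns early for them
// via the IsException flag instead of querying the factory.
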
// Layer policy template.
template<armnn::LayerType Type, armnn::DataType DataType>
struct LayerTypePolicy;

// Every entry in the armnn::LayerType enum must be accounted for below.
DECLARE_LAYER_POLICY_2_PARAM(Activation)

DECLARE_LAYER_POLICY_1_PARAM(Addition)

DECLARE_LAYER_POLICY_2_PARAM(ArgMinMax)

DECLARE_LAYER_POLICY_2_PARAM(BatchNormalization)

DECLARE_LAYER_POLICY_2_PARAM(BatchToSpaceNd)

DECLARE_LAYER_POLICY_2_PARAM(Comparison)

DECLARE_LAYER_POLICY_2_PARAM(Concat)

DECLARE_LAYER_POLICY_1_PARAM(Constant)

DECLARE_LAYER_POLICY_1_PARAM(ConvertBf16ToFp32)

DECLARE_LAYER_POLICY_1_PARAM(ConvertFp16ToFp32)

DECLARE_LAYER_POLICY_1_PARAM(ConvertFp32ToBf16)

DECLARE_LAYER_POLICY_1_PARAM(ConvertFp32ToFp16)

DECLARE_LAYER_POLICY_2_PARAM(Convolution2d)

DECLARE_LAYER_POLICY_1_PARAM(MemCopy)

DECLARE_LAYER_POLICY_1_PARAM(MemImport)

DECLARE_LAYER_POLICY_1_PARAM(Debug)

DECLARE_LAYER_POLICY_2_PARAM(DepthToSpace)

DECLARE_LAYER_POLICY_2_PARAM(DepthwiseConvolution2d)

DECLARE_LAYER_POLICY_1_PARAM(Dequantize)

DECLARE_LAYER_POLICY_2_PARAM(DetectionPostProcess)

DECLARE_LAYER_POLICY_2_PARAM(ElementwiseUnary)

DECLARE_LAYER_POLICY_2_PARAM(FakeQuantization)

DECLARE_LAYER_POLICY_2_PARAM(Fill)

DECLARE_LAYER_POLICY_1_PARAM(Floor)

DECLARE_LAYER_POLICY_2_PARAM(FullyConnected)

DECLARE_LAYER_POLICY_2_PARAM(Gather)

DECLARE_LAYER_POLICY_CUSTOM_PARAM(Input, armnn::LayerBindingId)

DECLARE_LAYER_POLICY_2_PARAM(InstanceNormalization)

DECLARE_LAYER_POLICY_2_PARAM(L2Normalization)

DECLARE_LAYER_POLICY_2_PARAM(LogicalBinary)

DECLARE_LAYER_POLICY_2_PARAM(LogSoftmax)

DECLARE_LAYER_POLICY_2_PARAM(Lstm)

DECLARE_LAYER_POLICY_MAP_PARAM(Map, void)

DECLARE_LAYER_POLICY_1_PARAM(Maximum)

DECLARE_LAYER_POLICY_2_PARAM(Mean)

DECLARE_LAYER_POLICY_1_PARAM(Merge)

DECLARE_LAYER_POLICY_1_PARAM(Minimum)

DECLARE_LAYER_POLICY_1_PARAM(Multiplication)

DECLARE_LAYER_POLICY_2_PARAM(Normalization)

DECLARE_LAYER_POLICY_CUSTOM_PARAM(Output, armnn::LayerBindingId)

DECLARE_LAYER_POLICY_2_PARAM(Pad)

DECLARE_LAYER_POLICY_1_PARAM(Quantize)

DECLARE_LAYER_POLICY_2_PARAM(Permute)

DECLARE_LAYER_POLICY_2_PARAM(Pooling2d)

DECLARE_LAYER_POLICY_2_PARAM(PreCompiled)

DECLARE_LAYER_POLICY_1_PARAM(Prelu)

DECLARE_LAYER_POLICY_2_PARAM(QLstm)

DECLARE_LAYER_POLICY_1_PARAM(QuantizedLstm)

DECLARE_LAYER_POLICY_1_PARAM(Division)

DECLARE_LAYER_POLICY_1_PARAM(Rank)

DECLARE_LAYER_POLICY_2_PARAM(Resize)

DECLARE_LAYER_POLICY_2_PARAM(Reshape)

DECLARE_LAYER_POLICY_2_PARAM(Slice)

DECLARE_LAYER_POLICY_2_PARAM(Softmax)

DECLARE_LAYER_POLICY_2_PARAM(SpaceToBatchNd)

DECLARE_LAYER_POLICY_2_PARAM(SpaceToDepth)

DECLARE_LAYER_POLICY_2_PARAM(Splitter)

DECLARE_LAYER_POLICY_2_PARAM(Stack)

DECLARE_LAYER_POLICY_EXCEPTION_2_PARAM(StandIn)

DECLARE_LAYER_POLICY_2_PARAM(StridedSlice)

DECLARE_LAYER_POLICY_1_PARAM(Subtraction)

DECLARE_LAYER_POLICY_2_PARAM(Reduce)

DECLARE_LAYER_POLICY_1_PARAM(Switch)

DECLARE_LAYER_POLICY_2_PARAM(Transpose)

DECLARE_LAYER_POLICY_2_PARAM(TransposeConvolution2d)

DECLARE_LAYER_POLICY_MAP_PARAM(Unmap, void)

// Generic implementation to get the number of input slots for a given layer type.
template<armnn::LayerType Type>
unsigned int GetNumInputs(const armnn::Layer& layer)
{
    return layer.GetNumInputSlots();
}

// Generic implementation to get the number of output slots for a given layer type.
template<armnn::LayerType Type>
unsigned int GetNumOutputs(const armnn::Layer& layer)
{
    return layer.GetNumOutputSlots();
}

// Concat is always tested with two inputs, to match the two origins in its dummy descriptor.
template<>
unsigned int GetNumInputs<armnn::LayerType::Concat>(const armnn::Layer& layer)
{
    IgnoreUnused(layer);
    return 2;
}

// Tests that the IsLayerSupported() function returns the correct value.
// We determine the correct value by *trying* to create the relevant workload and seeing if it matches what we expect.
// Returns true if expectations are met, otherwise returns false.
template<typename FactoryType, armnn::DataType DataType, armnn::LayerType Type>
bool IsLayerSupportedTest(FactoryType *factory, Tag<Type>)
{
    using LayerPolicy = LayerTypePolicy<Type, DataType>;
    using LayerType = typename LayerPolicy::Type;
    using LayerDesc = typename LayerPolicy::Desc;
    DummyLayer<LayerType, LayerDesc> layer;

    if (LayerPolicy::IsException) // Don't test exceptions to the rule.
    {
        return true;
    }

    unsigned int numIn = GetNumInputs<Type>(*layer.m_Layer);
    unsigned int numOut = GetNumOutputs<Type>(*layer.m_Layer);

    // Make another dummy layer just to make IsLayerSupported have valid inputs.
    DummyLayer<armnn::ConstantLayer, void> previousLayer;
    // Set output of the previous layer to a dummy tensor.
    armnn::TensorInfo output = MakeDummyTensorInfo<DataType>();
    previousLayer.m_Layer->GetOutputSlot(0).SetTensorInfo(output);
    // Connect all outputs of the previous layer to inputs of tested layer.
    for (unsigned int i = 0; i < numIn; i++)
    {
        armnn::IOutputSlot& previousLayerOutputSlot = previousLayer.m_Layer->GetOutputSlot(0);
        armnn::IInputSlot& layerInputSlot = layer.m_Layer->GetInputSlot(i);
        previousLayerOutputSlot.Connect(layerInputSlot);
    }
    // Set all outputs of the tested layer to a dummy tensor.
    for (unsigned int i = 0; i < numOut; i++)
    {
        layer.m_Layer->GetOutputSlot(i).SetTensorInfo(output);
    }

    std::string layerName = LayerPolicy::NameStr;
    std::string reasonIfUnsupported;
    if (FactoryType::IsLayerSupported(*layer.m_Layer, DataType, reasonIfUnsupported))
    {
        std::string errorMsg = " layer expected support but found none.";
        try
        {
            bool retVal = LayerPolicy::MakeDummyWorkload(factory, numIn, numOut).get() != nullptr;
            BOOST_CHECK_MESSAGE(retVal, layerName << errorMsg);
            return retVal;
        }
        catch(const armnn::InvalidArgumentException& e)
        {
            IgnoreUnused(e);
            // This is ok since we throw InvalidArgumentException when creating the dummy workload.
            return true;
        }
        catch(const std::exception& e)
        {
            errorMsg = e.what();
            BOOST_TEST_ERROR(layerName << ": " << errorMsg);
            return false;
        }
        catch(...)
        {
            errorMsg = "Unexpected error while testing support for ";
            BOOST_TEST_ERROR(errorMsg << layerName);
            return false;
        }
    }
    else
    {
        std::string errorMsg = " layer expected no support (giving reason: " + reasonIfUnsupported + ") but found some.";
        try
        {
            bool retVal = LayerPolicy::MakeDummyWorkload(factory, numIn, numOut).get() == nullptr;
            BOOST_CHECK_MESSAGE(retVal, layerName << errorMsg);
            return retVal;
        }
        // These two exceptions are ok: For workloads that are partially supported, attempting to instantiate them
        // using parameters that make IsLayerSupported() return false should throw an
        // InvalidArgumentException or UnimplementedException.
        catch(const armnn::InvalidArgumentException& e)
        {
            IgnoreUnused(e);
            return true;
        }
        catch(const armnn::UnimplementedException& e)
        {
            IgnoreUnused(e);
            return true;
        }
        catch(const std::exception& e)
        {
            errorMsg = e.what();
            BOOST_TEST_ERROR(layerName << ": " << errorMsg);
            return false;
        }
        catch(...)
        {
            errorMsg = "Unexpected error while testing support for ";
            BOOST_TEST_ERROR(errorMsg << layerName);
            return false;
        }
    }
}

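// Map and Unmap need no support query or dummy workload; their overloads simply report success.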
template<typename FactoryType, armnn::DataType DataType, armnn::LayerType Type>
bool IsLayerSupportedTest(FactoryType *factory, Tag<armnn::LayerType::Map>)
{
    IgnoreUnused(factory);
    return true;
}

template<typename FactoryType, armnn::DataType DataType, armnn::LayerType Type>
bool IsLayerSupportedTest(FactoryType *factory, Tag<armnn::LayerType::Unmap>)
{
    IgnoreUnused(factory);
    return true;
}

// Helper function to compute the next type in the LayerType enum.
constexpr armnn::LayerType NextType(armnn::LayerType type)
{
    return static_cast<armnn::LayerType>(static_cast<int>(type)+1);
}

// Termination function for determining the end of the LayerType enumeration.
template<typename FactoryType, armnn::DataType DataType, armnn::LayerType Type>
bool IsLayerSupportedTestsImpl(FactoryType *factory, Tag<armnn::LayerType::LastLayer>)
{
    return IsLayerSupportedTest<FactoryType, DataType, Type>(factory, Tag<Type>());
}

// Recursive function to test an entry in the LayerType enum and then iterate on to the next entry.
template<typename FactoryType, armnn::DataType DataType, armnn::LayerType Type>
bool IsLayerSupportedTestsImpl(FactoryType *factory, Tag<Type>)
{
    bool v = IsLayerSupportedTest<FactoryType, DataType, Type>(factory, Tag<Type>());

    return v &&
           IsLayerSupportedTestsImpl<FactoryType, DataType, NextType(Type)>
               (factory, Tag<NextType(Type)>());
}

// Helper function to pass through to the test framework.
template<typename FactoryType, armnn::DataType DataType>
bool IsLayerSupportedTests(FactoryType *factory)
{
    return IsLayerSupportedTestsImpl<FactoryType, DataType>(factory, Tag<armnn::LayerType::FirstLayer>());
}

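// A minimal usage sketch, assuming a backend workload factory such as armnn::RefWorkloadFactory:
//   armnn::RefWorkloadFactory factory;
//   BOOST_CHECK((IsLayerSupportedTests<armnn::RefWorkloadFactory, armnn::DataType::Float32>(&factory)));
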
template<armnn::LayerType Type>
bool TestLayerTypeMatches()
{
    using LayerPolicy = LayerTypePolicy<Type, armnn::DataType::Float32>;
    using LayerType = typename LayerPolicy::Type;
    using LayerDesc = typename LayerPolicy::Desc;
    DummyLayer<LayerType, LayerDesc> layer;

    std::stringstream ss;
    ss << LayerPolicy::NameStr << " layer type mismatches expected layer type value.";
    bool v = Type == layer.m_Layer->GetType();
    BOOST_CHECK_MESSAGE(v, ss.str());
    return v;
}

template<armnn::LayerType Type>
bool LayerTypeMatchesTestImpl(Tag<armnn::LayerType::LastLayer>)
{
    return TestLayerTypeMatches<Type>();
}

template<armnn::LayerType Type>
bool LayerTypeMatchesTestImpl(Tag<Type>)
{
    return TestLayerTypeMatches<Type>() &&
           LayerTypeMatchesTestImpl<NextType(Type)>(Tag<NextType(Type)>());
}

template<typename FactoryType, typename LayerType, armnn::DataType InputDataType, armnn::DataType OutputDataType>
bool IsConvertLayerSupportedTests(std::string& reasonIfUnsupported)
{
    armnn::Graph graph;
    LayerType* const layer = graph.AddLayer<LayerType>("LayerName");

    armnn::Layer* const input = graph.AddLayer<armnn::InputLayer>(0, "input");
    armnn::Layer* const output = graph.AddLayer<armnn::OutputLayer>(0, "output");

    armnn::TensorInfo inputTensorInfo({1, 3, 2, 3}, InputDataType);
    armnn::TensorInfo outputTensorInfo({1, 3, 2, 3}, OutputDataType);

    input->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
    input->GetOutputHandler(0).SetTensorInfo(inputTensorInfo);
    layer->GetOutputSlot(0).Connect(output->GetInputSlot(0));
    layer->GetOutputHandler(0).SetTensorInfo(outputTensorInfo);

    bool result = FactoryType::IsLayerSupported(*layer, InputDataType, reasonIfUnsupported);

    return result;
}

template<typename FactoryType, armnn::DataType InputDataType, armnn::DataType OutputDataType>
bool IsLogicalBinaryLayerSupportedTests(std::string& reasonIfUnsupported)
{
    armnn::Graph graph;
    armnn::LogicalBinaryDescriptor desc(armnn::LogicalBinaryOperation::LogicalOr);

    armnn::Layer* const input0 = graph.AddLayer<armnn::InputLayer>(0, "input0");
    armnn::Layer* const input1 = graph.AddLayer<armnn::InputLayer>(1, "input1");

    armnn::Layer* const layer = graph.AddLayer<armnn::LogicalBinaryLayer>(desc, "logicalOrLayer");

    armnn::Layer* const output = graph.AddLayer<armnn::OutputLayer>(0, "output1");

    armnn::TensorInfo inputTensorInfo0({1, 1, 1, 4}, InputDataType);
    armnn::TensorInfo inputTensorInfo1({1, 1, 1, 4}, InputDataType);

    armnn::TensorInfo outputTensorInfo({1, 1, 1, 4}, OutputDataType);

    input0->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
    input1->GetOutputSlot(0).Connect(layer->GetInputSlot(1));

    input0->GetOutputHandler(0).SetTensorInfo(inputTensorInfo0);
    input1->GetOutputHandler(0).SetTensorInfo(inputTensorInfo1);

    layer->GetOutputSlot(0).Connect(output->GetInputSlot(0));
    layer->GetOutputHandler(0).SetTensorInfo(outputTensorInfo);

    bool result = FactoryType::IsLayerSupported(*layer, InputDataType, reasonIfUnsupported);

    return result;
}

// Broadcast variant: the second input is a single element that must be broadcast against the first.
template<typename FactoryType, armnn::DataType InputDataType, armnn::DataType OutputDataType>
bool IsLogicalBinaryLayerBroadcastSupportedTests(std::string& reasonIfUnsupported)
{
    armnn::Graph graph;
    armnn::LogicalBinaryDescriptor desc(armnn::LogicalBinaryOperation::LogicalAnd);

    armnn::Layer* const input0 = graph.AddLayer<armnn::InputLayer>(0, "input0");
    armnn::Layer* const input1 = graph.AddLayer<armnn::InputLayer>(1, "input1");

    armnn::Layer* const layer = graph.AddLayer<armnn::LogicalBinaryLayer>(desc, "logicalAndLayer");

    armnn::Layer* const output = graph.AddLayer<armnn::OutputLayer>(0, "output2");

    armnn::TensorInfo inputTensorInfo0({1, 1, 1, 4}, InputDataType);
    armnn::TensorInfo inputTensorInfo1({1, 1, 1, 1}, InputDataType);

    armnn::TensorInfo outputTensorInfo({1, 1, 1, 4}, OutputDataType);

    input0->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
    input1->GetOutputSlot(0).Connect(layer->GetInputSlot(1));

    input0->GetOutputHandler(0).SetTensorInfo(inputTensorInfo0);
    input1->GetOutputHandler(0).SetTensorInfo(inputTensorInfo1);

    layer->GetOutputSlot(0).Connect(output->GetInputSlot(0));
    layer->GetOutputHandler(0).SetTensorInfo(outputTensorInfo);

    bool result = FactoryType::IsLayerSupported(*layer, InputDataType, reasonIfUnsupported);

    return result;
}

template<typename FactoryType, armnn::DataType InputDataType, armnn::DataType OutputDataType>
bool IsMeanLayerSupportedTests(std::string& reasonIfUnsupported)
{
    armnn::Graph graph;
    static const std::vector<unsigned> axes = {1, 0};
    armnn::MeanDescriptor desc(axes, false);

    armnn::Layer* const layer = graph.AddLayer<armnn::MeanLayer>(desc, "LayerName");

    armnn::Layer* const input = graph.AddLayer<armnn::InputLayer>(0, "input");
    armnn::Layer* const output = graph.AddLayer<armnn::OutputLayer>(0, "output");

    armnn::TensorInfo inputTensorInfo({4, 3, 2}, InputDataType);
    armnn::TensorInfo outputTensorInfo({2}, OutputDataType);

    input->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
    input->GetOutputHandler(0).SetTensorInfo(inputTensorInfo);
    layer->GetOutputSlot(0).Connect(output->GetInputSlot(0));
    layer->GetOutputHandler(0).SetTensorInfo(outputTensorInfo);

    bool result = FactoryType::IsLayerSupported(*layer, InputDataType, reasonIfUnsupported);

    return result;
}

// Tests that IsMeanSupported fails when the input tensor dimensions
// do not match the output tensor dimensions when keepDims == true.
template<typename FactoryType, armnn::DataType InputDataType, armnn::DataType OutputDataType>
bool IsMeanLayerNotSupportedTests(std::string& reasonIfUnsupported)
{
    armnn::Graph graph;
    static const std::vector<unsigned> axes = {};
    // Set keepDims == true.
    armnn::MeanDescriptor desc(axes, true);

    armnn::Layer* const layer = graph.AddLayer<armnn::MeanLayer>(desc, "LayerName");

    armnn::Layer* const input = graph.AddLayer<armnn::InputLayer>(0, "input");
    armnn::Layer* const output = graph.AddLayer<armnn::OutputLayer>(0, "output");

    // Mismatching number of tensor dimensions.
    armnn::TensorInfo inputTensorInfo({1, 1, 1, 1}, InputDataType);
    armnn::TensorInfo outputTensorInfo({1, 1}, OutputDataType);

    input->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
    input->GetOutputHandler(0).SetTensorInfo(inputTensorInfo);
    layer->GetOutputSlot(0).Connect(output->GetInputSlot(0));
    layer->GetOutputHandler(0).SetTensorInfo(outputTensorInfo);

    bool result = FactoryType::IsLayerSupported(*layer, InputDataType, reasonIfUnsupported);

    return result;
}

template<typename FactoryType, armnn::DataType OutputDataType>
bool IsConstantLayerSupportedTests(std::string& reasonIfUnsupported)
{
    armnn::Graph graph;

    armnn::Layer* const layer = graph.AddLayer<armnn::ConstantLayer>("ConstantLayerName");
    armnn::Layer* const output = graph.AddLayer<armnn::OutputLayer>(0, "OutputLayerName");

    armnn::TensorInfo outputTensorInfo({1, 1}, OutputDataType);

    layer->GetOutputSlot(0).Connect(output->GetInputSlot(0));
    layer->GetOutputHandler(0).SetTensorInfo(outputTensorInfo);

    bool result = FactoryType::IsLayerSupported(*layer, OutputDataType, reasonIfUnsupported);

    return result;
}

} // namespace