//
// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//
#pragma once

#include <Graph.hpp>

#include <backendsCommon/MapWorkload.hpp>
#include <backendsCommon/UnmapWorkload.hpp>
#include <backendsCommon/WorkloadFactory.hpp>

#include <armnn/utility/IgnoreUnused.hpp>

namespace
{
armnn::Graph dummyGraph;

// Make a dummy TensorInfo object.
template<armnn::DataType DataType>
armnn::TensorInfo MakeDummyTensorInfo()
{
    return armnn::TensorInfo({2,2,2,2}, DataType, 1.0, 0);
}


// Make a dummy WorkloadInfo using a dummy TensorInfo.
template<armnn::DataType DataType>
armnn::WorkloadInfo MakeDummyWorkloadInfo(unsigned int numInputs, unsigned int numOutputs)
{
    armnn::WorkloadInfo info;

    for (unsigned int i=0; i < numInputs; i++)
    {
        info.m_InputTensorInfos.push_back(MakeDummyTensorInfo<DataType>());
    }

    for (unsigned int o=0; o < numOutputs; o++)
    {
        info.m_OutputTensorInfos.push_back(MakeDummyTensorInfo<DataType>());
    }

    return info;
}

// Template class to create a dummy layer (2 parameters).
template<typename LayerType, typename DescType = typename LayerType::DescriptorType>
struct DummyLayer
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<LayerType>(DescType(), "");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    LayerType* m_Layer;
};

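// Example of how the RAII wrapper above is used by the tests below (a sketch):
// constructing a DummyLayer adds the layer to dummyGraph with a
// default-constructed descriptor, and the destructor erases it again.
//
//     {
//         DummyLayer<armnn::SoftmaxLayer> softmax; // adds a SoftmaxLayer to dummyGraph
//         softmax.m_Layer->GetNumInputSlots();     // the raw layer is reachable via m_Layer
//     } // leaving scope removes the layer from dummyGraph
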
// Template class to create a dummy layer (1 parameter).
template<typename LayerType>
struct DummyLayer<LayerType, void>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<LayerType>("");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    LayerType* m_Layer;
};

template<>
struct DummyLayer<armnn::BatchNormalizationLayer>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::BatchNormalizationLayer>(armnn::BatchNormalizationDescriptor(), "");
        m_Layer->m_Mean = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_Variance = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_Beta = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_Gamma = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::BatchNormalizationLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::BatchToSpaceNdLayer>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::BatchToSpaceNdLayer>(armnn::BatchToSpaceNdDescriptor(), "");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::BatchToSpaceNdLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::ConstantLayer, void>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::ConstantLayer>("");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::ConstantLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::InputLayer, armnn::LayerBindingId>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::InputLayer>(armnn::LayerBindingId(), "");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::InputLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::ConcatLayer>
{
    DummyLayer()
    {
        armnn::OriginsDescriptor desc(2);
        m_Layer = dummyGraph.AddLayer<armnn::ConcatLayer>(desc, "");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::ConcatLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::MapLayer, void>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::MapLayer>("");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::MapLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::OutputLayer, armnn::LayerBindingId>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::OutputLayer>(armnn::LayerBindingId(), "");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::OutputLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::SplitterLayer>
{
    DummyLayer()
    {
        armnn::ViewsDescriptor desc(1);
        m_Layer = dummyGraph.AddLayer<armnn::SplitterLayer>(desc, "");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::SplitterLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::UnmapLayer, void>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::UnmapLayer>("");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::UnmapLayer* m_Layer;
};

template <typename ConvolutionLayerType>
struct DummyConvolutionLayer
{
    DummyConvolutionLayer()
    {
        typename ConvolutionLayerType::DescriptorType desc;
        desc.m_StrideX = 1;
        desc.m_StrideY = 1;
        m_Layer = dummyGraph.AddLayer<ConvolutionLayerType>(desc, "");
        m_Layer->m_Weight = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_Bias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
    }

    ~DummyConvolutionLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    ConvolutionLayerType* m_Layer;
};

template<>
struct DummyLayer<armnn::Convolution2dLayer>
    : public DummyConvolutionLayer<armnn::Convolution2dLayer>
{
};

template<>
struct DummyLayer<armnn::DepthwiseConvolution2dLayer>
    : public DummyConvolutionLayer<armnn::DepthwiseConvolution2dLayer>
{
};

template<>
struct DummyLayer<armnn::TransposeConvolution2dLayer>
    : public DummyConvolutionLayer<armnn::TransposeConvolution2dLayer>
{
};

template<>
struct DummyLayer<armnn::DetectionPostProcessLayer>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::DetectionPostProcessLayer>(armnn::DetectionPostProcessDescriptor(), "");
        m_Layer->m_Anchors = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::DetectionPostProcessLayer* m_Layer;
};

template <typename LstmLayerType>
struct DummyLstmLayer
{
    DummyLstmLayer()
    {
        typename LstmLayerType::DescriptorType desc;
        desc.m_CifgEnabled = false;

        m_Layer = dummyGraph.AddLayer<LstmLayerType>(desc, "");
        m_Layer->m_BasicParameters.m_InputToForgetWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_InputToCellWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_InputToOutputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_RecurrentToForgetWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_RecurrentToCellWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_RecurrentToOutputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_ForgetGateBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_CellBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_OutputGateBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));

        // CIFG is disabled in the descriptor, so the input gate parameters are required.
        m_Layer->m_CifgParameters.m_InputToInputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_CifgParameters.m_RecurrentToInputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_CifgParameters.m_InputGateBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
    }

    ~DummyLstmLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::LstmLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::LstmLayer>
    : public DummyLstmLayer<armnn::LstmLayer>
{
};

template <typename QLstmLayerType>
struct DummyQLstmLayer
{
    DummyQLstmLayer()
    {
        typename QLstmLayerType::DescriptorType desc;
        desc.m_CifgEnabled = false;
        desc.m_PeepholeEnabled = true;
        desc.m_ProjectionEnabled = true;
        desc.m_LayerNormEnabled = true;

        m_Layer = dummyGraph.AddLayer<QLstmLayerType>(desc, "qLstm");

        // Basic params
        m_Layer->m_BasicParameters.m_InputToForgetWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));
        m_Layer->m_BasicParameters.m_InputToCellWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));
        m_Layer->m_BasicParameters.m_InputToOutputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));

        m_Layer->m_BasicParameters.m_RecurrentToForgetWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));
        m_Layer->m_BasicParameters.m_RecurrentToCellWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));
        m_Layer->m_BasicParameters.m_RecurrentToOutputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));

        m_Layer->m_BasicParameters.m_ForgetGateBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));
        m_Layer->m_BasicParameters.m_CellBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));
        m_Layer->m_BasicParameters.m_OutputGateBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));

        // CIFG optional params
        m_Layer->m_CifgParameters.m_InputToInputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));
        m_Layer->m_CifgParameters.m_RecurrentToInputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));
        m_Layer->m_CifgParameters.m_InputGateBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));

        // Projection optional params
        m_Layer->m_ProjectionParameters.m_ProjectionWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));
        m_Layer->m_ProjectionParameters.m_ProjectionBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));

        // Peephole optional params
        m_Layer->m_PeepholeParameters.m_CellToInputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS16));
        m_Layer->m_PeepholeParameters.m_CellToForgetWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS16));
        m_Layer->m_PeepholeParameters.m_CellToOutputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS16));

        // Layer normalization optional params
        m_Layer->m_LayerNormParameters.m_InputLayerNormWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS16));
        m_Layer->m_LayerNormParameters.m_ForgetLayerNormWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS16));
        m_Layer->m_LayerNormParameters.m_CellLayerNormWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS16));
        m_Layer->m_LayerNormParameters.m_OutputLayerNormWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS16));
    }

    ~DummyQLstmLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::QLstmLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::QuantizedLstmLayer, void>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::QuantizedLstmLayer>("");

        m_Layer->m_QuantizedLstmParameters.m_InputToInputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));
        m_Layer->m_QuantizedLstmParameters.m_InputToForgetWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));
        m_Layer->m_QuantizedLstmParameters.m_InputToCellWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));
        m_Layer->m_QuantizedLstmParameters.m_InputToOutputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));

        m_Layer->m_QuantizedLstmParameters.m_RecurrentToInputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));
        m_Layer->m_QuantizedLstmParameters.m_RecurrentToForgetWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));
        m_Layer->m_QuantizedLstmParameters.m_RecurrentToCellWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));
        m_Layer->m_QuantizedLstmParameters.m_RecurrentToOutputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));

        m_Layer->m_QuantizedLstmParameters.m_InputGateBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));
        m_Layer->m_QuantizedLstmParameters.m_ForgetGateBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));
        m_Layer->m_QuantizedLstmParameters.m_CellBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));
        m_Layer->m_QuantizedLstmParameters.m_OutputGateBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::QuantizedLstmLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::FullyConnectedLayer>
{
    DummyLayer()
    {
        armnn::FullyConnectedLayer::DescriptorType desc;
        m_Layer = dummyGraph.AddLayer<armnn::FullyConnectedLayer>(desc, "");
        m_Layer->m_Weight = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::FullyConnectedLayer* m_Layer;
};

// Tag for giving each LayerType entry a unique strong type.
template<armnn::LayerType>
struct Tag{};

#define DECLARE_LAYER_POLICY_CUSTOM_PARAM(name, descType) \
template<armnn::DataType DataType> \
struct LayerTypePolicy<armnn::LayerType::name, DataType> \
{ \
    using Type = armnn::name##Layer; \
    using Desc = descType; \
    using QueueDesc = armnn::name##QueueDescriptor; \
    constexpr static const char* NameStr = #name; \
    constexpr static const bool IsException = false; \
    \
    static std::unique_ptr<armnn::IWorkload> MakeDummyWorkload(armnn::IWorkloadFactory *factory, \
                                                               unsigned int nIn, unsigned int nOut) \
    { \
        QueueDesc desc; \
        armnn::WorkloadInfo info = MakeDummyWorkloadInfo<DataType>(nIn, nOut); \
        return factory->Create##name(desc, info); \
    } \
};

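// For illustration (a sketch of one expansion, not an extra definition):
// DECLARE_LAYER_POLICY_CUSTOM_PARAM(Input, armnn::LayerBindingId) produces
//
//     template<armnn::DataType DataType>
//     struct LayerTypePolicy<armnn::LayerType::Input, DataType>
//     {
//         using Type = armnn::InputLayer;
//         using Desc = armnn::LayerBindingId;
//         using QueueDesc = armnn::InputQueueDescriptor;
//         // ... and MakeDummyWorkload calls factory->CreateInput(desc, info).
//     };
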
#define DECLARE_LAYER_POLICY_MAP_PARAM(name, descType) \
template<armnn::DataType DataType> \
struct LayerTypePolicy<armnn::LayerType::name, DataType> \
{ \
    using Type = armnn::name##Layer; \
    using Desc = descType; \
    using QueueDesc = armnn::name##QueueDescriptor; \
    using Workload = armnn::name##Workload; \
    constexpr static const char* NameStr = #name; \
    constexpr static const bool IsException = false; \
    \
    static std::unique_ptr<armnn::IWorkload> MakeDummyWorkload(armnn::IWorkloadFactory* factory, \
                                                               unsigned int nIn, unsigned int nOut) \
    { \
        IgnoreUnused(factory); \
        QueueDesc desc; \
        armnn::WorkloadInfo info = MakeDummyWorkloadInfo<DataType>(nIn, nOut); \
        return std::make_unique<armnn::name##Workload>(desc, info); \
    } \
};

// Define a layer policy specialization for use with the IsLayerSupported tests.
// Use this version for layers whose constructor takes 1 parameter (name).
#define DECLARE_LAYER_POLICY_1_PARAM(name) DECLARE_LAYER_POLICY_CUSTOM_PARAM(name, void)

// Define a layer policy specialization for use with the IsLayerSupported tests.
// Use this version for layers whose constructor takes 2 parameters (descriptor and name).
#define DECLARE_LAYER_POLICY_2_PARAM(name) DECLARE_LAYER_POLICY_CUSTOM_PARAM(name, armnn::name##Descriptor)


#define DECLARE_LAYER_POLICY_EXCEPTION(name, descType) \
template<armnn::DataType DataType> \
struct LayerTypePolicy<armnn::LayerType::name, DataType> \
{ \
    using Type = armnn::name##Layer; \
    using Desc = descType; \
    constexpr static const char* NameStr = #name; \
    constexpr static const bool IsException = true; \
    \
    static std::unique_ptr<armnn::IWorkload> MakeDummyWorkload(armnn::IWorkloadFactory *factory, \
                                                               unsigned int nIn, unsigned int nOut) \
    { \
        IgnoreUnused(factory, nIn, nOut); \
        return std::unique_ptr<armnn::IWorkload>(); \
    } \
};

#define DECLARE_LAYER_POLICY_EXCEPTION_1_PARAM(name) DECLARE_LAYER_POLICY_EXCEPTION(name, void)
#define DECLARE_LAYER_POLICY_EXCEPTION_2_PARAM(name) DECLARE_LAYER_POLICY_EXCEPTION(name, armnn::name##Descriptor)

// Layer policy template.
template<armnn::LayerType Type, armnn::DataType DataType>
struct LayerTypePolicy;

// Every entry in the armnn::LayerType enum must be accounted for below.
DECLARE_LAYER_POLICY_2_PARAM(Activation)

DECLARE_LAYER_POLICY_1_PARAM(Addition)

DECLARE_LAYER_POLICY_2_PARAM(ArgMinMax)

DECLARE_LAYER_POLICY_2_PARAM(BatchNormalization)

DECLARE_LAYER_POLICY_2_PARAM(BatchToSpaceNd)

DECLARE_LAYER_POLICY_1_PARAM(Cast)

DECLARE_LAYER_POLICY_2_PARAM(Comparison)

DECLARE_LAYER_POLICY_2_PARAM(Concat)

DECLARE_LAYER_POLICY_1_PARAM(Constant)

DECLARE_LAYER_POLICY_1_PARAM(ConvertBf16ToFp32)

DECLARE_LAYER_POLICY_1_PARAM(ConvertFp16ToFp32)

DECLARE_LAYER_POLICY_1_PARAM(ConvertFp32ToBf16)

DECLARE_LAYER_POLICY_1_PARAM(ConvertFp32ToFp16)

DECLARE_LAYER_POLICY_2_PARAM(Convolution2d)

DECLARE_LAYER_POLICY_1_PARAM(MemCopy)

DECLARE_LAYER_POLICY_1_PARAM(MemImport)

DECLARE_LAYER_POLICY_1_PARAM(Debug)

DECLARE_LAYER_POLICY_2_PARAM(DepthToSpace)

DECLARE_LAYER_POLICY_2_PARAM(DepthwiseConvolution2d)

DECLARE_LAYER_POLICY_1_PARAM(Dequantize)

DECLARE_LAYER_POLICY_2_PARAM(DetectionPostProcess)

DECLARE_LAYER_POLICY_2_PARAM(ElementwiseUnary)

DECLARE_LAYER_POLICY_2_PARAM(FakeQuantization)

DECLARE_LAYER_POLICY_2_PARAM(Fill)

DECLARE_LAYER_POLICY_1_PARAM(Floor)

DECLARE_LAYER_POLICY_2_PARAM(FullyConnected)

DECLARE_LAYER_POLICY_2_PARAM(Gather)

DECLARE_LAYER_POLICY_CUSTOM_PARAM(Input, armnn::LayerBindingId)

DECLARE_LAYER_POLICY_2_PARAM(InstanceNormalization)

DECLARE_LAYER_POLICY_2_PARAM(L2Normalization)

DECLARE_LAYER_POLICY_2_PARAM(LogicalBinary)

DECLARE_LAYER_POLICY_2_PARAM(LogSoftmax)

DECLARE_LAYER_POLICY_2_PARAM(Lstm)

DECLARE_LAYER_POLICY_MAP_PARAM(Map, void)

DECLARE_LAYER_POLICY_1_PARAM(Maximum)

DECLARE_LAYER_POLICY_2_PARAM(Mean)

DECLARE_LAYER_POLICY_1_PARAM(Merge)

DECLARE_LAYER_POLICY_1_PARAM(Minimum)

DECLARE_LAYER_POLICY_1_PARAM(Multiplication)

DECLARE_LAYER_POLICY_2_PARAM(Normalization)

DECLARE_LAYER_POLICY_CUSTOM_PARAM(Output, armnn::LayerBindingId)

DECLARE_LAYER_POLICY_2_PARAM(Pad)

DECLARE_LAYER_POLICY_1_PARAM(Quantize)

DECLARE_LAYER_POLICY_2_PARAM(Permute)

DECLARE_LAYER_POLICY_2_PARAM(Pooling2d)

DECLARE_LAYER_POLICY_2_PARAM(PreCompiled)

DECLARE_LAYER_POLICY_1_PARAM(Prelu)

DECLARE_LAYER_POLICY_2_PARAM(QLstm)

DECLARE_LAYER_POLICY_1_PARAM(QuantizedLstm)

DECLARE_LAYER_POLICY_1_PARAM(Division)

DECLARE_LAYER_POLICY_1_PARAM(Rank)

DECLARE_LAYER_POLICY_2_PARAM(Resize)

DECLARE_LAYER_POLICY_2_PARAM(Reshape)

DECLARE_LAYER_POLICY_2_PARAM(Slice)

DECLARE_LAYER_POLICY_2_PARAM(Softmax)

DECLARE_LAYER_POLICY_2_PARAM(SpaceToBatchNd)

DECLARE_LAYER_POLICY_2_PARAM(SpaceToDepth)

DECLARE_LAYER_POLICY_2_PARAM(Splitter)

DECLARE_LAYER_POLICY_2_PARAM(Stack)

DECLARE_LAYER_POLICY_EXCEPTION_2_PARAM(StandIn)

DECLARE_LAYER_POLICY_2_PARAM(StridedSlice)

DECLARE_LAYER_POLICY_1_PARAM(Subtraction)

DECLARE_LAYER_POLICY_2_PARAM(Reduce)

DECLARE_LAYER_POLICY_1_PARAM(Switch)

DECLARE_LAYER_POLICY_2_PARAM(Transpose)

DECLARE_LAYER_POLICY_2_PARAM(TransposeConvolution2d)

DECLARE_LAYER_POLICY_MAP_PARAM(Unmap, void)


// Generic implementation to get the number of input slots for a given layer type.
template<armnn::LayerType Type>
unsigned int GetNumInputs(const armnn::Layer& layer)
{
    return layer.GetNumInputSlots();
}

// Generic implementation to get the number of output slots for a given layer type.
template<armnn::LayerType Type>
unsigned int GetNumOutputs(const armnn::Layer& layer)
{
    return layer.GetNumOutputSlots();
}

template<>
unsigned int GetNumInputs<armnn::LayerType::Concat>(const armnn::Layer& layer)
{
    IgnoreUnused(layer);
    return 2;
}

// Tests that the IsLayerSupported() function returns the correct value.
// We determined the correct value by *trying* to create the relevant workload and seeing if it matches what we expect.
// Returns true if expectations are met, otherwise returns false.
template<typename FactoryType, armnn::DataType DataType, armnn::LayerType Type>
bool IsLayerSupportedTest(FactoryType *factory, Tag<Type>)
{
    using LayerPolicy = LayerTypePolicy<Type, DataType>;
    using LayerType = typename LayerPolicy::Type;
    using LayerDesc = typename LayerPolicy::Desc;
    DummyLayer<LayerType, LayerDesc> layer;

    if (LayerPolicy::IsException) // Don't test exceptions to the rule.
    {
        return true;
    }

    unsigned int numIn = GetNumInputs<Type>(*layer.m_Layer);
    unsigned int numOut = GetNumOutputs<Type>(*layer.m_Layer);

    // Make another dummy layer just to make IsLayerSupported have valid inputs.
    DummyLayer<armnn::ConstantLayer, void> previousLayer;
    // Set output of the previous layer to a dummy tensor.
    armnn::TensorInfo output = MakeDummyTensorInfo<DataType>();
    previousLayer.m_Layer->GetOutputSlot(0).SetTensorInfo(output);
    // Connect all outputs of the previous layer to inputs of tested layer.
    for (unsigned int i = 0; i < numIn; i++)
    {
        armnn::IOutputSlot& previousLayerOutputSlot = previousLayer.m_Layer->GetOutputSlot(0);
        armnn::IInputSlot& layerInputSlot = layer.m_Layer->GetInputSlot(i);
        previousLayerOutputSlot.Connect(layerInputSlot);
    }
    // Set outputs of tested layer to a dummy tensor.
    for (unsigned int i = 0; i < numOut; i++)
    {
        layer.m_Layer->GetOutputSlot(i).SetTensorInfo(output);
    }

    std::string layerName = LayerPolicy::NameStr;
    std::string reasonIfUnsupported;
    if (FactoryType::IsLayerSupported(*layer.m_Layer, DataType, reasonIfUnsupported))
    {
        std::string errorMsg = " layer expected support but found none.";
        try
        {
            bool retVal = LayerPolicy::MakeDummyWorkload(factory, numIn, numOut).get() != nullptr;
            BOOST_CHECK_MESSAGE(retVal, layerName << errorMsg);
            return retVal;
        }
        catch(const armnn::InvalidArgumentException& e)
        {
            IgnoreUnused(e);
            // This is ok since we throw InvalidArgumentException when creating the dummy workload.
            return true;
        }
        catch(const std::exception& e)
        {
            errorMsg = e.what();
            BOOST_TEST_ERROR(layerName << ": " << errorMsg);
            return false;
        }
        catch(...)
        {
            errorMsg = "Unexpected error while testing support for ";
            BOOST_TEST_ERROR(errorMsg << layerName);
            return false;
        }
    }
    else
    {
        std::string errorMsg = " layer expected no support (giving reason: " + reasonIfUnsupported + ") but found some.";
        try
        {
            bool retVal = LayerPolicy::MakeDummyWorkload(factory, numIn, numOut).get() == nullptr;
            BOOST_CHECK_MESSAGE(retVal, layerName << errorMsg);
            return retVal;
        }
        // These two exceptions are ok: for workloads that are partially supported, attempting to instantiate them
        // using parameters that make IsLayerSupported() return false should throw an
        // InvalidArgumentException or UnimplementedException.
        catch(const armnn::InvalidArgumentException& e)
        {
            IgnoreUnused(e);
            return true;
        }
        catch(const armnn::UnimplementedException& e)
        {
            IgnoreUnused(e);
            return true;
        }
        catch(const std::exception& e)
        {
            errorMsg = e.what();
            BOOST_TEST_ERROR(layerName << ": " << errorMsg);
            return false;
        }
        catch(...)
        {
            errorMsg = "Unexpected error while testing support for ";
            BOOST_TEST_ERROR(errorMsg << layerName);
            return false;
        }
    }
}

template<typename FactoryType, armnn::DataType DataType, armnn::LayerType Type>
bool IsLayerSupportedTest(FactoryType *factory, Tag<armnn::LayerType::Map>)
{
    IgnoreUnused(factory);
    return true;
}

template<typename FactoryType, armnn::DataType DataType, armnn::LayerType Type>
bool IsLayerSupportedTest(FactoryType *factory, Tag<armnn::LayerType::Unmap>)
{
    IgnoreUnused(factory);
    return true;
}

// Helper function to compute the next type in the LayerType enum.
constexpr armnn::LayerType NextType(armnn::LayerType type)
{
    return static_cast<armnn::LayerType>(static_cast<int>(type)+1);
}

// Termination function for determining the end of the LayerType enumeration.
template<typename FactoryType, armnn::DataType DataType, armnn::LayerType Type>
bool IsLayerSupportedTestsImpl(FactoryType *factory, Tag<armnn::LayerType::LastLayer>)
{
    return IsLayerSupportedTest<FactoryType, DataType, Type>(factory, Tag<Type>());
}

// Recursive function to test an entry in the LayerType enum and then iterate on the next entry.
template<typename FactoryType, armnn::DataType DataType, armnn::LayerType Type>
bool IsLayerSupportedTestsImpl(FactoryType *factory, Tag<Type>)
{
    bool v = IsLayerSupportedTest<FactoryType, DataType, Type>(factory, Tag<Type>());

    return v &&
        IsLayerSupportedTestsImpl<FactoryType, DataType, NextType(Type)>
            (factory, Tag<NextType(Type)>());
}

// Helper function to pass through to the test framework.
template<typename FactoryType, armnn::DataType DataType>
bool IsLayerSupportedTests(FactoryType *factory)
{
    return IsLayerSupportedTestsImpl<FactoryType, DataType>(factory, Tag<armnn::LayerType::FirstLayer>());
}

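// Example driver (a sketch; "MockWorkloadFactory" is a hypothetical factory
// type, i.e. any workload factory exposing a static IsLayerSupported()):
//
//     MockWorkloadFactory factory;
//     bool ok = IsLayerSupportedTests<MockWorkloadFactory, armnn::DataType::Float32>(&factory);
//     BOOST_CHECK(ok);
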
template<armnn::LayerType Type>
bool TestLayerTypeMatches()
{
    using LayerPolicy = LayerTypePolicy<Type, armnn::DataType::Float32>;
    using LayerType = typename LayerPolicy::Type;
    using LayerDesc = typename LayerPolicy::Desc;
    DummyLayer<LayerType, LayerDesc> layer;

    std::stringstream ss;
    ss << LayerPolicy::NameStr << " layer type mismatches expected layer type value.";
    bool v = Type == layer.m_Layer->GetType();
    BOOST_CHECK_MESSAGE(v, ss.str());
    return v;
}

template<armnn::LayerType Type>
bool LayerTypeMatchesTestImpl(Tag<armnn::LayerType::LastLayer>)
{
    return TestLayerTypeMatches<Type>();
}

template<armnn::LayerType Type>
bool LayerTypeMatchesTestImpl(Tag<Type>)
{
    return TestLayerTypeMatches<Type>() &&
        LayerTypeMatchesTestImpl<NextType(Type)>(Tag<NextType(Type)>());
}

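// Example driver (a sketch): iterating the whole enum from FirstLayer checks
// that every layer's GetType() agrees with its LayerTypePolicy entry.
//
//     bool matches = LayerTypeMatchesTestImpl<armnn::LayerType::FirstLayer>(
//         Tag<armnn::LayerType::FirstLayer>());
//     BOOST_CHECK(matches);
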
template<typename FactoryType, typename LayerType, armnn::DataType InputDataType, armnn::DataType OutputDataType>
bool IsConvertLayerSupportedTests(std::string& reasonIfUnsupported)
{
    armnn::Graph graph;
    LayerType* const layer = graph.AddLayer<LayerType>("LayerName");

    armnn::Layer* const input = graph.AddLayer<armnn::InputLayer>(0, "input");
    armnn::Layer* const output = graph.AddLayer<armnn::OutputLayer>(0, "output");

    armnn::TensorInfo inputTensorInfo({1, 3, 2, 3}, InputDataType);
    armnn::TensorInfo outputTensorInfo({1, 3, 2, 3}, OutputDataType);

    input->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
    input->GetOutputHandler(0).SetTensorInfo(inputTensorInfo);
    layer->GetOutputSlot(0).Connect(output->GetInputSlot(0));
    layer->GetOutputHandler(0).SetTensorInfo(outputTensorInfo);

    bool result = FactoryType::IsLayerSupported(*layer, InputDataType, reasonIfUnsupported);

    return result;
}

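// Example use (a sketch; "RefWorkloadFactory" is assumed here to be the
// backend factory under test):
//
//     std::string reason;
//     bool supported = IsConvertLayerSupportedTests<RefWorkloadFactory,
//                                                   armnn::ConvertFp16ToFp32Layer,
//                                                   armnn::DataType::Float16,
//                                                   armnn::DataType::Float32>(reason);
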
template<typename FactoryType, armnn::DataType InputDataType, armnn::DataType OutputDataType>
bool IsLogicalBinaryLayerSupportedTests(std::string& reasonIfUnsupported)
{
    armnn::Graph graph;
    armnn::LogicalBinaryDescriptor desc(armnn::LogicalBinaryOperation::LogicalOr);

    armnn::Layer* const input0 = graph.AddLayer<armnn::InputLayer>(0, "input0");
    armnn::Layer* const input1 = graph.AddLayer<armnn::InputLayer>(1, "input1");

    armnn::Layer* const layer = graph.AddLayer<armnn::LogicalBinaryLayer>(desc, "logicalOrLayer");

    armnn::Layer* const output = graph.AddLayer<armnn::OutputLayer>(0, "output1");

    armnn::TensorInfo inputTensorInfo0({1, 1, 1, 4}, InputDataType);
    armnn::TensorInfo inputTensorInfo1({1, 1, 1, 4}, InputDataType);

    armnn::TensorInfo outputTensorInfo({1, 1, 1, 4}, OutputDataType);

    input0->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
    input1->GetOutputSlot(0).Connect(layer->GetInputSlot(1));

    input0->GetOutputHandler(0).SetTensorInfo(inputTensorInfo0);
    input1->GetOutputHandler(0).SetTensorInfo(inputTensorInfo1);

    layer->GetOutputSlot(0).Connect(output->GetInputSlot(0));
    layer->GetOutputHandler(0).SetTensorInfo(outputTensorInfo);

    bool result = FactoryType::IsLayerSupported(*layer, InputDataType, reasonIfUnsupported);

    return result;
}

template<typename FactoryType, armnn::DataType InputDataType, armnn::DataType OutputDataType>
bool IsLogicalBinaryLayerBroadcastSupportedTests(std::string& reasonIfUnsupported)
{
    armnn::Graph graph;
    armnn::LogicalBinaryDescriptor desc(armnn::LogicalBinaryOperation::LogicalAnd);

    armnn::Layer* const input0 = graph.AddLayer<armnn::InputLayer>(0, "input0");
    armnn::Layer* const input1 = graph.AddLayer<armnn::InputLayer>(1, "input1");

    armnn::Layer* const layer = graph.AddLayer<armnn::LogicalBinaryLayer>(desc, "logicalAndLayer");

    armnn::Layer* const output = graph.AddLayer<armnn::OutputLayer>(0, "output2");

    armnn::TensorInfo inputTensorInfo0({1, 1, 1, 4}, InputDataType);
    armnn::TensorInfo inputTensorInfo1({1, 1, 1, 1}, InputDataType);

    armnn::TensorInfo outputTensorInfo({1, 1, 1, 4}, OutputDataType);

    input0->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
    input1->GetOutputSlot(0).Connect(layer->GetInputSlot(1));

    input0->GetOutputHandler(0).SetTensorInfo(inputTensorInfo0);
    input1->GetOutputHandler(0).SetTensorInfo(inputTensorInfo1);

    layer->GetOutputSlot(0).Connect(output->GetInputSlot(0));
    layer->GetOutputHandler(0).SetTensorInfo(outputTensorInfo);

    bool result = FactoryType::IsLayerSupported(*layer, InputDataType, reasonIfUnsupported);

    return result;
}

template<typename FactoryType, armnn::DataType InputDataType, armnn::DataType OutputDataType>
bool IsMeanLayerSupportedTests(std::string& reasonIfUnsupported)
{
    armnn::Graph graph;
    static const std::vector<unsigned> axes = {1, 0};
    armnn::MeanDescriptor desc(axes, false);

    armnn::Layer* const layer = graph.AddLayer<armnn::MeanLayer>(desc, "LayerName");

    armnn::Layer* const input = graph.AddLayer<armnn::InputLayer>(0, "input");
    armnn::Layer* const output = graph.AddLayer<armnn::OutputLayer>(0, "output");

    armnn::TensorInfo inputTensorInfo({4, 3, 2}, InputDataType);
    armnn::TensorInfo outputTensorInfo({2}, OutputDataType);

    input->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
    input->GetOutputHandler(0).SetTensorInfo(inputTensorInfo);
    layer->GetOutputSlot(0).Connect(output->GetInputSlot(0));
    layer->GetOutputHandler(0).SetTensorInfo(outputTensorInfo);

    bool result = FactoryType::IsLayerSupported(*layer, InputDataType, reasonIfUnsupported);

    return result;
}

// Tests that IsMeanSupported fails when input tensor dimensions
// do not match output tensor dimensions when keepDims == true
template<typename FactoryType, armnn::DataType InputDataType, armnn::DataType OutputDataType>
bool IsMeanLayerNotSupportedTests(std::string& reasonIfUnsupported)
{
    armnn::Graph graph;
    static const std::vector<unsigned> axes = {};
    // Set keepDims == true
    armnn::MeanDescriptor desc(axes, true);

    armnn::Layer* const layer = graph.AddLayer<armnn::MeanLayer>(desc, "LayerName");

    armnn::Layer* const input = graph.AddLayer<armnn::InputLayer>(0, "input");
    armnn::Layer* const output = graph.AddLayer<armnn::OutputLayer>(0, "output");

    // Mismatching number of tensor dimensions
    armnn::TensorInfo inputTensorInfo({1, 1, 1, 1}, InputDataType);
    armnn::TensorInfo outputTensorInfo({1, 1}, OutputDataType);

    input->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
    input->GetOutputHandler(0).SetTensorInfo(inputTensorInfo);
    layer->GetOutputSlot(0).Connect(output->GetInputSlot(0));
    layer->GetOutputHandler(0).SetTensorInfo(outputTensorInfo);

    bool result = FactoryType::IsLayerSupported(*layer, InputDataType, reasonIfUnsupported);

    return result;
}

template<typename FactoryType, armnn::DataType OutputDataType>
bool IsConstantLayerSupportedTests(std::string& reasonIfUnsupported)
{
    armnn::Graph graph;

    armnn::Layer* const layer = graph.AddLayer<armnn::ConstantLayer>("ConstantLayerName");
    armnn::Layer* const output = graph.AddLayer<armnn::OutputLayer>(0, "OutputLayerName");

    armnn::TensorInfo outputTensorInfo({1, 1}, OutputDataType);

    layer->GetOutputSlot(0).Connect(output->GetInputSlot(0));
    layer->GetOutputHandler(0).SetTensorInfo(outputTensorInfo);

    bool result = FactoryType::IsLayerSupported(*layer, OutputDataType, reasonIfUnsupported);

    return result;
}

} // namespace