//
// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//
#pragma once

#include <Graph.hpp>

#include <backendsCommon/MapWorkload.hpp>
#include <backendsCommon/UnmapWorkload.hpp>
#include <backendsCommon/WorkloadFactory.hpp>

#include <armnn/utility/IgnoreUnused.hpp>

namespace
{
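// Graph shared by all of the dummy layers below: each DummyLayer adds itself to
// this graph on construction and erases itself again on destruction.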
armnn::Graph dummyGraph;

// Make a dummy TensorInfo object.
template<armnn::DataType DataType>
armnn::TensorInfo MakeDummyTensorInfo()
{
    return armnn::TensorInfo({2,2,2,2}, DataType, 1.0, 0);
}

// Make a dummy WorkloadInfo using a dummy TensorInfo.
template<armnn::DataType DataType>
armnn::WorkloadInfo MakeDummyWorkloadInfo(unsigned int numInputs, unsigned int numOutputs)
{
    armnn::WorkloadInfo info;

    for (unsigned int i = 0; i < numInputs; i++)
    {
        info.m_InputTensorInfos.push_back(MakeDummyTensorInfo<DataType>());
    }

    for (unsigned int o = 0; o < numOutputs; o++)
    {
        info.m_OutputTensorInfos.push_back(MakeDummyTensorInfo<DataType>());
    }

    return info;
}

// Template class to create a dummy layer (2 parameters).
template<typename LayerType, typename DescType = typename LayerType::DescriptorType>
struct DummyLayer
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<LayerType>(DescType(), "");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    LayerType* m_Layer;
};

// Template class to create a dummy layer (1 parameter).
template<typename LayerType>
struct DummyLayer<LayerType, void>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<LayerType>("");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    LayerType* m_Layer;
};

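// Layer types whose construction needs more than a default descriptor and a name
// (constant member tensors, binding ids, descriptors with mandatory arguments)
// get explicit DummyLayer specializations below.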
template<>
struct DummyLayer<armnn::BatchNormalizationLayer>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::BatchNormalizationLayer>(armnn::BatchNormalizationDescriptor(), "");
        m_Layer->m_Mean = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_Variance = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_Beta = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_Gamma = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::BatchNormalizationLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::BatchToSpaceNdLayer>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::BatchToSpaceNdLayer>(armnn::BatchToSpaceNdDescriptor(), "");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::BatchToSpaceNdLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::ConstantLayer, void>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::ConstantLayer>("");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::ConstantLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::InputLayer, armnn::LayerBindingId>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::InputLayer>(armnn::LayerBindingId(), "");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::InputLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::ConcatLayer>
{
    DummyLayer()
    {
        armnn::OriginsDescriptor desc(2);
        m_Layer = dummyGraph.AddLayer<armnn::ConcatLayer>(desc, "");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::ConcatLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::MapLayer, void>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::MapLayer>("");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::MapLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::OutputLayer, armnn::LayerBindingId>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::OutputLayer>(armnn::LayerBindingId(), "");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::OutputLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::SplitterLayer>
{
    DummyLayer()
    {
        armnn::ViewsDescriptor desc(1);
        m_Layer = dummyGraph.AddLayer<armnn::SplitterLayer>(desc, "");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::SplitterLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::UnmapLayer, void>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::UnmapLayer>("");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::UnmapLayer* m_Layer;
};

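// Convolution-style layers share one dummy implementation that supplies non-zero
// strides and constant weight/bias tensors, presumably so the descriptor survives
// basic validation when the dummy workload is created.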
template <typename ConvolutionLayerType>
struct DummyConvolutionLayer
{
    DummyConvolutionLayer()
    {
        typename ConvolutionLayerType::DescriptorType desc;
        desc.m_StrideX = 1;
        desc.m_StrideY = 1;
        m_Layer = dummyGraph.AddLayer<ConvolutionLayerType>(desc, "");
        m_Layer->m_Weight = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_Bias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
    }

    ~DummyConvolutionLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    ConvolutionLayerType* m_Layer;
};

template<>
struct DummyLayer<armnn::Convolution2dLayer>
    : public DummyConvolutionLayer<armnn::Convolution2dLayer>
{
};

template<>
struct DummyLayer<armnn::DepthwiseConvolution2dLayer>
    : public DummyConvolutionLayer<armnn::DepthwiseConvolution2dLayer>
{
};

template<>
struct DummyLayer<armnn::TransposeConvolution2dLayer>
    : public DummyConvolutionLayer<armnn::TransposeConvolution2dLayer>
{
};

template<>
struct DummyLayer<armnn::DetectionPostProcessLayer>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::DetectionPostProcessLayer>(armnn::DetectionPostProcessDescriptor(), "");
        m_Layer->m_Anchors = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::DetectionPostProcessLayer* m_Layer;
};

template <typename LstmLayerType>
struct DummyLstmLayer
{
    DummyLstmLayer()
    {
        typename LstmLayerType::DescriptorType desc;
        desc.m_CifgEnabled = false;

        // Pass the configured descriptor (CIFG disabled) so the CIFG parameters set below are consistent.
        m_Layer = dummyGraph.AddLayer<LstmLayerType>(desc, "");
        m_Layer->m_BasicParameters.m_InputToForgetWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_InputToCellWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_InputToOutputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_RecurrentToForgetWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_RecurrentToCellWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_RecurrentToOutputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_ForgetGateBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_CellBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_OutputGateBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));

        m_Layer->m_CifgParameters.m_InputToInputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_CifgParameters.m_RecurrentToInputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_CifgParameters.m_InputGateBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
    }

    ~DummyLstmLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::LstmLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::LstmLayer>
    : public DummyLstmLayer<armnn::LstmLayer>
{
};

template <typename QLstmLayerType>
struct DummyQLstmLayer
{
    DummyQLstmLayer()
    {
        typename QLstmLayerType::DescriptorType desc;
        desc.m_CifgEnabled = false;
        desc.m_PeepholeEnabled = true;
        desc.m_ProjectionEnabled = true;
        desc.m_LayerNormEnabled = true;

        // Pass the configured descriptor so the optional parameters set below match its flags.
        m_Layer = dummyGraph.AddLayer<QLstmLayerType>(desc, "qLstm");

        // Basic params
        m_Layer->m_BasicParameters.m_InputToForgetWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));
        m_Layer->m_BasicParameters.m_InputToCellWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));
        m_Layer->m_BasicParameters.m_InputToOutputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));

        m_Layer->m_BasicParameters.m_RecurrentToForgetWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));
        m_Layer->m_BasicParameters.m_RecurrentToCellWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));
        m_Layer->m_BasicParameters.m_RecurrentToOutputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));

        m_Layer->m_BasicParameters.m_ForgetGateBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));
        m_Layer->m_BasicParameters.m_CellBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));
        m_Layer->m_BasicParameters.m_OutputGateBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));

        // CIFG optional params
        m_Layer->m_CifgParameters.m_InputToInputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));
        m_Layer->m_CifgParameters.m_RecurrentToInputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));
        m_Layer->m_CifgParameters.m_InputGateBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));

        // Projection optional params
        m_Layer->m_ProjectionParameters.m_ProjectionWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));
        m_Layer->m_ProjectionParameters.m_ProjectionBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));

        // Peephole optional params
        m_Layer->m_PeepholeParameters.m_CellToInputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS16));
        m_Layer->m_PeepholeParameters.m_CellToForgetWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS16));
        m_Layer->m_PeepholeParameters.m_CellToOutputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS16));

        // Layer normalization optional params
        m_Layer->m_LayerNormParameters.m_InputLayerNormWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS16));
        m_Layer->m_LayerNormParameters.m_ForgetLayerNormWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS16));
        m_Layer->m_LayerNormParameters.m_CellLayerNormWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS16));
        m_Layer->m_LayerNormParameters.m_OutputLayerNormWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS16));
    }

    ~DummyQLstmLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::QLstmLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::QuantizedLstmLayer, void>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::QuantizedLstmLayer>("");

        m_Layer->m_QuantizedLstmParameters.m_InputToInputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));
        m_Layer->m_QuantizedLstmParameters.m_InputToForgetWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));
        m_Layer->m_QuantizedLstmParameters.m_InputToCellWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));
        m_Layer->m_QuantizedLstmParameters.m_InputToOutputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));

        m_Layer->m_QuantizedLstmParameters.m_RecurrentToInputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));
        m_Layer->m_QuantizedLstmParameters.m_RecurrentToForgetWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));
        m_Layer->m_QuantizedLstmParameters.m_RecurrentToCellWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));
        m_Layer->m_QuantizedLstmParameters.m_RecurrentToOutputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));

        m_Layer->m_QuantizedLstmParameters.m_InputGateBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));
        m_Layer->m_QuantizedLstmParameters.m_ForgetGateBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));
        m_Layer->m_QuantizedLstmParameters.m_CellBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));
        m_Layer->m_QuantizedLstmParameters.m_OutputGateBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::QuantizedLstmLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::FullyConnectedLayer>
{
    DummyLayer()
    {
        armnn::FullyConnectedLayer::DescriptorType desc;
        m_Layer = dummyGraph.AddLayer<armnn::FullyConnectedLayer>(desc, "");
        m_Layer->m_Weight = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::FullyConnectedLayer* m_Layer;
};

// Tag for giving each LayerType entry a unique strong type.
template<armnn::LayerType>
struct Tag{};
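// Tag dispatch selects between the generic IsLayerSupportedTest overload and the
// per-layer-type overloads further down, e.g. the one taking Tag<armnn::LayerType::Map>.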

#define DECLARE_LAYER_POLICY_CUSTOM_PARAM(name, descType) \
template<armnn::DataType DataType> \
struct LayerTypePolicy<armnn::LayerType::name, DataType> \
{ \
    using Type = armnn::name##Layer; \
    using Desc = descType; \
    using QueueDesc = armnn::name##QueueDescriptor; \
    constexpr static const char* NameStr = #name; \
    constexpr static const bool IsException = false; \
    \
    static std::unique_ptr<armnn::IWorkload> MakeDummyWorkload(armnn::IWorkloadFactory *factory, \
        unsigned int nIn, unsigned int nOut) \
    { \
        QueueDesc desc; \
        armnn::WorkloadInfo info = MakeDummyWorkloadInfo<DataType>(nIn, nOut); \
        return factory->Create##name(desc, info); \
    } \
};
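// As an illustration, DECLARE_LAYER_POLICY_CUSTOM_PARAM(Input, armnn::LayerBindingId)
// expands (roughly) to:
//
//     template<armnn::DataType DataType>
//     struct LayerTypePolicy<armnn::LayerType::Input, DataType>
//     {
//         using Type = armnn::InputLayer;
//         using Desc = armnn::LayerBindingId;
//         using QueueDesc = armnn::InputQueueDescriptor;
//         constexpr static const char* NameStr = "Input";
//         constexpr static const bool IsException = false;
//
//         static std::unique_ptr<armnn::IWorkload> MakeDummyWorkload(armnn::IWorkloadFactory* factory,
//             unsigned int nIn, unsigned int nOut)
//         {
//             QueueDesc desc;
//             armnn::WorkloadInfo info = MakeDummyWorkloadInfo<DataType>(nIn, nOut);
//             return factory->CreateInput(desc, info);
//         }
//     };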

#define DECLARE_LAYER_POLICY_MAP_PARAM(name, descType) \
template<armnn::DataType DataType> \
struct LayerTypePolicy<armnn::LayerType::name, DataType> \
{ \
    using Type = armnn::name##Layer; \
    using Desc = descType; \
    using QueueDesc = armnn::name##QueueDescriptor; \
    using Workload = armnn::name##Workload; \
    constexpr static const char* NameStr = #name; \
    constexpr static const bool IsException = false; \
    \
    static std::unique_ptr<armnn::IWorkload> MakeDummyWorkload(armnn::IWorkloadFactory* factory, \
        unsigned int nIn, unsigned int nOut) \
    { \
        IgnoreUnused(factory); \
        QueueDesc desc; \
        armnn::WorkloadInfo info = MakeDummyWorkloadInfo<DataType>(nIn, nOut); \
        return std::make_unique<armnn::name##Workload>(desc, info); \
    } \
};
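// Unlike the factory-based policy above, the Map/Unmap policy instantiates the
// workload directly and deliberately ignores the factory argument; these layer
// types are presumably not exposed through IWorkloadFactory.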

// Define a layer policy specialization for use with the IsLayerSupported tests.
// Use this version for layers whose constructor takes 1 parameter (name).
#define DECLARE_LAYER_POLICY_1_PARAM(name) DECLARE_LAYER_POLICY_CUSTOM_PARAM(name, void)

// Define a layer policy specialization for use with the IsLayerSupported tests.
// Use this version for layers whose constructor takes 2 parameters (descriptor and name).
#define DECLARE_LAYER_POLICY_2_PARAM(name) DECLARE_LAYER_POLICY_CUSTOM_PARAM(name, armnn::name##Descriptor)

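// Exception policies mark layer types that cannot produce a workload at all:
// IsException is true, so IsLayerSupportedTest() skips them, and MakeDummyWorkload()
// returns a null pointer.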
#define DECLARE_LAYER_POLICY_EXCEPTION(name, descType) \
template<armnn::DataType DataType> \
struct LayerTypePolicy<armnn::LayerType::name, DataType> \
{ \
    using Type = armnn::name##Layer; \
    using Desc = descType; \
    constexpr static const char* NameStr = #name; \
    constexpr static const bool IsException = true; \
    \
    static std::unique_ptr<armnn::IWorkload> MakeDummyWorkload(armnn::IWorkloadFactory *factory, \
        unsigned int nIn, unsigned int nOut) \
    { \
        IgnoreUnused(factory, nIn, nOut); \
        return std::unique_ptr<armnn::IWorkload>(); \
    } \
};

#define DECLARE_LAYER_POLICY_EXCEPTION_1_PARAM(name) DECLARE_LAYER_POLICY_EXCEPTION(name, void)
#define DECLARE_LAYER_POLICY_EXCEPTION_2_PARAM(name) DECLARE_LAYER_POLICY_EXCEPTION(name, armnn::name##Descriptor)

// Layer policy template.
template<armnn::LayerType Type, armnn::DataType DataType>
struct LayerTypePolicy;

// Every entry in the armnn::LayerType enum must be accounted for below.
DECLARE_LAYER_POLICY_2_PARAM(Activation)

DECLARE_LAYER_POLICY_1_PARAM(Addition)

DECLARE_LAYER_POLICY_2_PARAM(ArgMinMax)

DECLARE_LAYER_POLICY_2_PARAM(BatchNormalization)

DECLARE_LAYER_POLICY_2_PARAM(BatchToSpaceNd)

DECLARE_LAYER_POLICY_2_PARAM(Comparison)

DECLARE_LAYER_POLICY_2_PARAM(Concat)

DECLARE_LAYER_POLICY_1_PARAM(Constant)

DECLARE_LAYER_POLICY_1_PARAM(ConvertBf16ToFp32)

DECLARE_LAYER_POLICY_1_PARAM(ConvertFp16ToFp32)

DECLARE_LAYER_POLICY_1_PARAM(ConvertFp32ToBf16)

DECLARE_LAYER_POLICY_1_PARAM(ConvertFp32ToFp16)

DECLARE_LAYER_POLICY_2_PARAM(Convolution2d)

DECLARE_LAYER_POLICY_1_PARAM(MemCopy)

DECLARE_LAYER_POLICY_1_PARAM(MemImport)

DECLARE_LAYER_POLICY_1_PARAM(Debug)

DECLARE_LAYER_POLICY_2_PARAM(DepthToSpace)

DECLARE_LAYER_POLICY_2_PARAM(DepthwiseConvolution2d)

DECLARE_LAYER_POLICY_1_PARAM(Dequantize)

DECLARE_LAYER_POLICY_2_PARAM(DetectionPostProcess)

DECLARE_LAYER_POLICY_2_PARAM(ElementwiseUnary)

DECLARE_LAYER_POLICY_2_PARAM(FakeQuantization)

DECLARE_LAYER_POLICY_2_PARAM(Fill)

DECLARE_LAYER_POLICY_1_PARAM(Floor)

DECLARE_LAYER_POLICY_2_PARAM(FullyConnected)

DECLARE_LAYER_POLICY_2_PARAM(Gather)

DECLARE_LAYER_POLICY_CUSTOM_PARAM(Input, armnn::LayerBindingId)

DECLARE_LAYER_POLICY_2_PARAM(InstanceNormalization)

DECLARE_LAYER_POLICY_2_PARAM(L2Normalization)

DECLARE_LAYER_POLICY_2_PARAM(LogicalBinary)

DECLARE_LAYER_POLICY_2_PARAM(LogSoftmax)

DECLARE_LAYER_POLICY_2_PARAM(Lstm)

DECLARE_LAYER_POLICY_MAP_PARAM(Map, void)

DECLARE_LAYER_POLICY_1_PARAM(Maximum)

DECLARE_LAYER_POLICY_2_PARAM(Mean)

DECLARE_LAYER_POLICY_1_PARAM(Merge)

DECLARE_LAYER_POLICY_1_PARAM(Minimum)

DECLARE_LAYER_POLICY_1_PARAM(Multiplication)

DECLARE_LAYER_POLICY_2_PARAM(Normalization)

DECLARE_LAYER_POLICY_CUSTOM_PARAM(Output, armnn::LayerBindingId)

DECLARE_LAYER_POLICY_2_PARAM(Pad)

DECLARE_LAYER_POLICY_1_PARAM(Quantize)

DECLARE_LAYER_POLICY_2_PARAM(Permute)

DECLARE_LAYER_POLICY_2_PARAM(Pooling2d)

DECLARE_LAYER_POLICY_2_PARAM(PreCompiled)

DECLARE_LAYER_POLICY_1_PARAM(Prelu)

DECLARE_LAYER_POLICY_2_PARAM(QLstm)

DECLARE_LAYER_POLICY_1_PARAM(QuantizedLstm)

DECLARE_LAYER_POLICY_1_PARAM(Division)

DECLARE_LAYER_POLICY_1_PARAM(Rank)

DECLARE_LAYER_POLICY_2_PARAM(Resize)

DECLARE_LAYER_POLICY_2_PARAM(Reshape)

DECLARE_LAYER_POLICY_2_PARAM(Slice)

DECLARE_LAYER_POLICY_2_PARAM(Softmax)

DECLARE_LAYER_POLICY_2_PARAM(SpaceToBatchNd)

DECLARE_LAYER_POLICY_2_PARAM(SpaceToDepth)

DECLARE_LAYER_POLICY_2_PARAM(Splitter)

DECLARE_LAYER_POLICY_2_PARAM(Stack)

DECLARE_LAYER_POLICY_EXCEPTION_2_PARAM(StandIn)

DECLARE_LAYER_POLICY_2_PARAM(StridedSlice)

DECLARE_LAYER_POLICY_1_PARAM(Subtraction)

DECLARE_LAYER_POLICY_1_PARAM(Switch)

DECLARE_LAYER_POLICY_2_PARAM(Transpose)

DECLARE_LAYER_POLICY_2_PARAM(TransposeConvolution2d)

DECLARE_LAYER_POLICY_MAP_PARAM(Unmap, void)

// Generic implementation to get the number of input slots for a given layer type.
template<armnn::LayerType Type>
unsigned int GetNumInputs(const armnn::Layer& layer)
{
    return layer.GetNumInputSlots();
}

// Generic implementation to get the number of output slots for a given layer type.
template<armnn::LayerType Type>
unsigned int GetNumOutputs(const armnn::Layer& layer)
{
    return layer.GetNumOutputSlots();
}

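// The Concat dummy layer is built with OriginsDescriptor(2) above, so it reports
// two input slots rather than whatever the generic implementation would find.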
template<>
unsigned int GetNumInputs<armnn::LayerType::Concat>(const armnn::Layer& layer)
{
    IgnoreUnused(layer);
    return 2;
}

// Tests that the IsLayerSupported() function returns the correct value.
// We determine the correct value by *trying* to create the relevant workload and seeing if it matches what we expect.
// Returns true if expectations are met, otherwise returns false.
template<typename FactoryType, armnn::DataType DataType, armnn::LayerType Type>
bool IsLayerSupportedTest(FactoryType *factory, Tag<Type>)
{
    using LayerPolicy = LayerTypePolicy<Type, DataType>;
    using LayerType = typename LayerPolicy::Type;
    using LayerDesc = typename LayerPolicy::Desc;
    DummyLayer<LayerType, LayerDesc> layer;

    if (LayerPolicy::IsException) // Don't test exceptions to the rule.
    {
        return true;
    }

    unsigned int numIn = GetNumInputs<Type>(*layer.m_Layer);
    unsigned int numOut = GetNumOutputs<Type>(*layer.m_Layer);

    // Make another dummy layer just to make IsLayerSupported have valid inputs.
    DummyLayer<armnn::ConstantLayer, void> previousLayer;
    // Set output of the previous layer to a dummy tensor.
    armnn::TensorInfo output = MakeDummyTensorInfo<DataType>();
    previousLayer.m_Layer->GetOutputSlot(0).SetTensorInfo(output);
    // Connect all outputs of the previous layer to inputs of the tested layer.
    for (unsigned int i = 0; i < numIn; i++)
    {
        armnn::IOutputSlot& previousLayerOutputSlot = previousLayer.m_Layer->GetOutputSlot(0);
        armnn::IInputSlot& layerInputSlot = layer.m_Layer->GetInputSlot(i);
        previousLayerOutputSlot.Connect(layerInputSlot);
    }
    // Set each output of the tested layer to a dummy tensor.
    for (unsigned int i = 0; i < numOut; i++)
    {
        layer.m_Layer->GetOutputSlot(i).SetTensorInfo(output);
    }

    std::string layerName = LayerPolicy::NameStr;
    std::string reasonIfUnsupported;
    if (FactoryType::IsLayerSupported(*layer.m_Layer, DataType, reasonIfUnsupported))
    {
        std::string errorMsg = " layer expected support but found none.";
        try
        {
            bool retVal = LayerPolicy::MakeDummyWorkload(factory, numIn, numOut).get() != nullptr;
            BOOST_CHECK_MESSAGE(retVal, layerName << errorMsg);
            return retVal;
        }
        catch(const armnn::InvalidArgumentException& e)
        {
            IgnoreUnused(e);
            // This is ok since we throw InvalidArgumentException when creating the dummy workload.
            return true;
        }
        catch(const std::exception& e)
        {
            errorMsg = e.what();
            BOOST_TEST_ERROR(layerName << ": " << errorMsg);
            return false;
        }
        catch(...)
        {
            errorMsg = "Unexpected error while testing support for ";
            BOOST_TEST_ERROR(errorMsg << layerName);
            return false;
        }
    }
    else
    {
        std::string errorMsg = " layer expected no support (giving reason: " + reasonIfUnsupported + ") but found some.";
        try
        {
            bool retVal = LayerPolicy::MakeDummyWorkload(factory, numIn, numOut).get() == nullptr;
            BOOST_CHECK_MESSAGE(retVal, layerName << errorMsg);
            return retVal;
        }
        // These two exceptions are ok: For workloads that are partially supported, attempting to instantiate them
        // using parameters that make IsLayerSupported() return false should throw an
        // InvalidArgumentException or UnimplementedException.
        catch(const armnn::InvalidArgumentException& e)
        {
            IgnoreUnused(e);
            return true;
        }
        catch(const armnn::UnimplementedException& e)
        {
            IgnoreUnused(e);
            return true;
        }
        catch(const std::exception& e)
        {
            errorMsg = e.what();
            BOOST_TEST_ERROR(layerName << ": " << errorMsg);
            return false;
        }
        catch(...)
        {
            errorMsg = "Unexpected error while testing support for ";
            BOOST_TEST_ERROR(errorMsg << layerName);
            return false;
        }
    }
}

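// Map and Unmap are excluded from the generic check: the overloads below simply
// report them as supported without attempting to create a workload.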
template<typename FactoryType, armnn::DataType DataType, armnn::LayerType Type>
bool IsLayerSupportedTest(FactoryType *factory, Tag<armnn::LayerType::Map>)
{
    IgnoreUnused(factory);
    return true;
}

template<typename FactoryType, armnn::DataType DataType, armnn::LayerType Type>
bool IsLayerSupportedTest(FactoryType *factory, Tag<armnn::LayerType::Unmap>)
{
    IgnoreUnused(factory);
    return true;
}

// Helper function to compute the next type in the LayerType enum.
constexpr armnn::LayerType NextType(armnn::LayerType type)
{
    return static_cast<armnn::LayerType>(static_cast<int>(type)+1);
}
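// Together with Tag<>, NextType() drives a compile-time iteration over the whole
// LayerType enum: each recursive instantiation below tests one enum value and then
// instantiates itself for the next one, terminating at LastLayer.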

// Termination function: tests LastLayer and ends the compile-time recursion.
template<typename FactoryType, armnn::DataType DataType, armnn::LayerType Type>
bool IsLayerSupportedTestsImpl(FactoryType *factory, Tag<armnn::LayerType::LastLayer>)
{
    return IsLayerSupportedTest<FactoryType, DataType, Type>(factory, Tag<Type>());
}

// Recursive function to test an entry in the LayerType enum and then iterate to the next entry.
template<typename FactoryType, armnn::DataType DataType, armnn::LayerType Type>
bool IsLayerSupportedTestsImpl(FactoryType *factory, Tag<Type>)
{
    bool v = IsLayerSupportedTest<FactoryType, DataType, Type>(factory, Tag<Type>());

    return v &&
           IsLayerSupportedTestsImpl<FactoryType, DataType, NextType(Type)>
               (factory, Tag<NextType(Type)>());
}

// Helper function to pass through to the test framework.
template<typename FactoryType, armnn::DataType DataType>
bool IsLayerSupportedTests(FactoryType *factory)
{
    return IsLayerSupportedTestsImpl<FactoryType, DataType>(factory, Tag<armnn::LayerType::FirstLayer>());
}
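// A backend's unit tests would typically run the whole sweep against its own
// factory type, along these lines (illustrative only; the factory name is a
// placeholder):
//
//     MyBackendWorkloadFactory factory;
//     BOOST_CHECK(IsLayerSupportedTests<MyBackendWorkloadFactory, armnn::DataType::Float32>(&factory));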

template<armnn::LayerType Type>
bool TestLayerTypeMatches()
{
    using LayerPolicy = LayerTypePolicy<Type, armnn::DataType::Float32>;
    using LayerType = typename LayerPolicy::Type;
    using LayerDesc = typename LayerPolicy::Desc;
    DummyLayer<LayerType, LayerDesc> layer;

    std::stringstream ss;
    ss << LayerPolicy::NameStr << " layer type mismatches expected layer type value.";
    bool v = Type == layer.m_Layer->GetType();
    BOOST_CHECK_MESSAGE(v, ss.str());
    return v;
}

template<armnn::LayerType Type>
bool LayerTypeMatchesTestImpl(Tag<armnn::LayerType::LastLayer>)
{
    return TestLayerTypeMatches<Type>();
}

template<armnn::LayerType Type>
bool LayerTypeMatchesTestImpl(Tag<Type>)
{
    return TestLayerTypeMatches<Type>() &&
           LayerTypeMatchesTestImpl<NextType(Type)>(Tag<NextType(Type)>());
}

template<typename FactoryType, typename LayerType, armnn::DataType InputDataType, armnn::DataType OutputDataType>
bool IsConvertLayerSupportedTests(std::string& reasonIfUnsupported)
{
    armnn::Graph graph;
    LayerType* const layer = graph.AddLayer<LayerType>("LayerName");

    armnn::Layer* const input = graph.AddLayer<armnn::InputLayer>(0, "input");
    armnn::Layer* const output = graph.AddLayer<armnn::OutputLayer>(0, "output");

    armnn::TensorInfo inputTensorInfo({1, 3, 2, 3}, InputDataType);
    armnn::TensorInfo outputTensorInfo({1, 3, 2, 3}, OutputDataType);

    input->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
    input->GetOutputHandler(0).SetTensorInfo(inputTensorInfo);
    layer->GetOutputSlot(0).Connect(output->GetInputSlot(0));
    layer->GetOutputHandler(0).SetTensorInfo(outputTensorInfo);

    bool result = FactoryType::IsLayerSupported(*layer, InputDataType, reasonIfUnsupported);

    return result;
}

template<typename FactoryType, armnn::DataType InputDataType, armnn::DataType OutputDataType>
bool IsMeanLayerSupportedTests(std::string& reasonIfUnsupported)
{
    armnn::Graph graph;
    static const std::vector<unsigned> axes = {1, 0};
    armnn::MeanDescriptor desc(axes, false);

    armnn::Layer* const layer = graph.AddLayer<armnn::MeanLayer>(desc, "LayerName");

    armnn::Layer* const input = graph.AddLayer<armnn::InputLayer>(0, "input");
    armnn::Layer* const output = graph.AddLayer<armnn::OutputLayer>(0, "output");

    armnn::TensorInfo inputTensorInfo({4, 3, 2}, InputDataType);
    armnn::TensorInfo outputTensorInfo({2}, OutputDataType);

    input->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
    input->GetOutputHandler(0).SetTensorInfo(inputTensorInfo);
    layer->GetOutputSlot(0).Connect(output->GetInputSlot(0));
    layer->GetOutputHandler(0).SetTensorInfo(outputTensorInfo);

    bool result = FactoryType::IsLayerSupported(*layer, InputDataType, reasonIfUnsupported);

    return result;
}

// Tests that IsMeanSupported fails when input tensor dimensions
// do not match output tensor dimensions when keepDims == true.
template<typename FactoryType, armnn::DataType InputDataType, armnn::DataType OutputDataType>
bool IsMeanLayerNotSupportedTests(std::string& reasonIfUnsupported)
{
    armnn::Graph graph;
    static const std::vector<unsigned> axes = {};
    // Set keepDims == true.
    armnn::MeanDescriptor desc(axes, true);

    armnn::Layer* const layer = graph.AddLayer<armnn::MeanLayer>(desc, "LayerName");

    armnn::Layer* const input = graph.AddLayer<armnn::InputLayer>(0, "input");
    armnn::Layer* const output = graph.AddLayer<armnn::OutputLayer>(0, "output");

    // Mismatching number of tensor dimensions.
    armnn::TensorInfo inputTensorInfo({1, 1, 1, 1}, InputDataType);
    armnn::TensorInfo outputTensorInfo({1, 1}, OutputDataType);

    input->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
    input->GetOutputHandler(0).SetTensorInfo(inputTensorInfo);
    layer->GetOutputSlot(0).Connect(output->GetInputSlot(0));
    layer->GetOutputHandler(0).SetTensorInfo(outputTensorInfo);

    bool result = FactoryType::IsLayerSupported(*layer, InputDataType, reasonIfUnsupported);

    return result;
}

template<typename FactoryType, armnn::DataType OutputDataType>
bool IsConstantLayerSupportedTests(std::string& reasonIfUnsupported)
{
    armnn::Graph graph;

    armnn::Layer* const layer = graph.AddLayer<armnn::ConstantLayer>("ConstantLayerName");
    armnn::Layer* const output = graph.AddLayer<armnn::OutputLayer>(0, "OutputLayerName");

    armnn::TensorInfo outputTensorInfo({1, 1}, OutputDataType);

    layer->GetOutputSlot(0).Connect(output->GetInputSlot(0));
    layer->GetOutputHandler(0).SetTensorInfo(outputTensorInfo);

    bool result = FactoryType::IsLayerSupported(*layer, OutputDataType, reasonIfUnsupported);

    return result;
}

} // namespace