//
// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//
#pragma once

#include <Graph.hpp>

#include <backendsCommon/MapWorkload.hpp>
#include <backendsCommon/UnmapWorkload.hpp>
#include <backendsCommon/WorkloadFactory.hpp>

#include <armnn/utility/IgnoreUnused.hpp>

#include <doctest/doctest.h>

namespace
{
armnn::Graph dummyGraph;

// Make a dummy TensorInfo object.
template<armnn::DataType DataType>
armnn::TensorInfo MakeDummyTensorInfo()
{
    return armnn::TensorInfo({2,2,2,2}, DataType, 1.0, 0);
}

// Make a dummy WorkloadInfo using a dummy TensorInfo.
template<armnn::DataType DataType>
armnn::WorkloadInfo MakeDummyWorkloadInfo(unsigned int numInputs, unsigned int numOutputs)
{
    armnn::WorkloadInfo info;

    for (unsigned int i = 0; i < numInputs; i++)
    {
        info.m_InputTensorInfos.push_back(MakeDummyTensorInfo<DataType>());
    }

    for (unsigned int o = 0; o < numOutputs; o++)
    {
        info.m_OutputTensorInfos.push_back(MakeDummyTensorInfo<DataType>());
    }

    return info;
}

// Template class to create a dummy layer (2 parameters).
template<typename LayerType, typename DescType = typename LayerType::DescriptorType>
struct DummyLayer
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<LayerType>(DescType(), "");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    LayerType* m_Layer;
};

// Template class to create a dummy layer (1 parameter).
template<typename LayerType>
struct DummyLayer<LayerType, void>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<LayerType>("");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    LayerType* m_Layer;
};
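
// Example (sketch): a DummyLayer instance adds its layer to dummyGraph on
// construction and erases it again on destruction. Hypothetical usage:
//
//     DummyLayer<armnn::ActivationLayer> activation;   // 2-parameter form (default descriptor + name)
//     DummyLayer<armnn::FloorLayer, void> floor;       // 1-parameter form (name only)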

template<>
struct DummyLayer<armnn::BatchNormalizationLayer>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::BatchNormalizationLayer>(armnn::BatchNormalizationDescriptor(), "");
        m_Layer->m_Mean = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_Variance = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_Beta = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_Gamma = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::BatchNormalizationLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::BatchToSpaceNdLayer>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::BatchToSpaceNdLayer>(armnn::BatchToSpaceNdDescriptor(), "");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::BatchToSpaceNdLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::ConstantLayer, void>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::ConstantLayer>("");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::ConstantLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::InputLayer, armnn::LayerBindingId>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::InputLayer>(armnn::LayerBindingId(), "");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::InputLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::ConcatLayer>
{
    DummyLayer()
    {
        armnn::OriginsDescriptor desc(2);
        m_Layer = dummyGraph.AddLayer<armnn::ConcatLayer>(desc, "");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::ConcatLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::MapLayer, void>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::MapLayer>("");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::MapLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::OutputLayer, armnn::LayerBindingId>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::OutputLayer>(armnn::LayerBindingId(), "");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::OutputLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::SplitterLayer>
{
    DummyLayer()
    {
        armnn::ViewsDescriptor desc(1);
        m_Layer = dummyGraph.AddLayer<armnn::SplitterLayer>(desc, "");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::SplitterLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::UnmapLayer, void>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::UnmapLayer>("");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::UnmapLayer* m_Layer;
};

template <typename ConvolutionLayerType>
struct DummyConvolutionLayer
{
    DummyConvolutionLayer()
    {
        typename ConvolutionLayerType::DescriptorType desc;
        desc.m_StrideX = 1;
        desc.m_StrideY = 1;
        m_Layer = dummyGraph.AddLayer<ConvolutionLayerType>(desc, "");
        m_Layer->m_Weight = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_Bias = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
    }

    ~DummyConvolutionLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    ConvolutionLayerType* m_Layer;
};

template<>
struct DummyLayer<armnn::Convolution2dLayer>
    : public DummyConvolutionLayer<armnn::Convolution2dLayer>
{
};

template<>
struct DummyLayer<armnn::DepthwiseConvolution2dLayer>
    : public DummyConvolutionLayer<armnn::DepthwiseConvolution2dLayer>
{
};

template<>
struct DummyLayer<armnn::TransposeConvolution2dLayer>
    : public DummyConvolutionLayer<armnn::TransposeConvolution2dLayer>
{
};

template<>
struct DummyLayer<armnn::DetectionPostProcessLayer>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::DetectionPostProcessLayer>(armnn::DetectionPostProcessDescriptor(), "");
        m_Layer->m_Anchors = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::DetectionPostProcessLayer* m_Layer;
};

template <typename LstmLayerType>
struct DummyLstmLayer
{
    DummyLstmLayer()
    {
        typename LstmLayerType::DescriptorType desc;
        desc.m_CifgEnabled = false;

        m_Layer = dummyGraph.AddLayer<LstmLayerType>(desc, "");
        m_Layer->m_BasicParameters.m_InputToForgetWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_InputToCellWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_InputToOutputWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_RecurrentToForgetWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_RecurrentToCellWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_RecurrentToOutputWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_ForgetGateBias = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_CellBias = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_OutputGateBias = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));

        m_Layer->m_CifgParameters.m_InputToInputWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_CifgParameters.m_RecurrentToInputWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_CifgParameters.m_InputGateBias = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
    }

    ~DummyLstmLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::LstmLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::LstmLayer>
    : public DummyLstmLayer<armnn::LstmLayer>
{
};

template<>
struct DummyLayer<armnn::QLstmLayer>
{
    DummyLayer()
    {
        armnn::QLstmLayer::DescriptorType desc;
        desc.m_CifgEnabled = false;
        desc.m_PeepholeEnabled = true;
        desc.m_ProjectionEnabled = true;
        desc.m_LayerNormEnabled = true;

        m_Layer = dummyGraph.AddLayer<armnn::QLstmLayer>(desc, "qLstm");

        // Basic params
        m_Layer->m_BasicParameters.m_InputToForgetWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));
        m_Layer->m_BasicParameters.m_InputToCellWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));
        m_Layer->m_BasicParameters.m_InputToOutputWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));

        m_Layer->m_BasicParameters.m_RecurrentToForgetWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));
        m_Layer->m_BasicParameters.m_RecurrentToCellWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));
        m_Layer->m_BasicParameters.m_RecurrentToOutputWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));

        m_Layer->m_BasicParameters.m_ForgetGateBias = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));
        m_Layer->m_BasicParameters.m_CellBias = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));
        m_Layer->m_BasicParameters.m_OutputGateBias = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));

        // CIFG optional params
        m_Layer->m_CifgParameters.m_InputToInputWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));
        m_Layer->m_CifgParameters.m_RecurrentToInputWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));
        m_Layer->m_CifgParameters.m_InputGateBias = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));

        // Projection optional params
        m_Layer->m_ProjectionParameters.m_ProjectionWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));
        m_Layer->m_ProjectionParameters.m_ProjectionBias = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));

        // Peephole optional params
        m_Layer->m_PeepholeParameters.m_CellToInputWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS16));
        m_Layer->m_PeepholeParameters.m_CellToForgetWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS16));
        m_Layer->m_PeepholeParameters.m_CellToOutputWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS16));

        // Layer normalization optional params
        m_Layer->m_LayerNormParameters.m_InputLayerNormWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS16));
        m_Layer->m_LayerNormParameters.m_ForgetLayerNormWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS16));
        m_Layer->m_LayerNormParameters.m_CellLayerNormWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS16));
        m_Layer->m_LayerNormParameters.m_OutputLayerNormWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS16));
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::QLstmLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::QuantizedLstmLayer, void>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::QuantizedLstmLayer>("");

        m_Layer->m_QuantizedLstmParameters.m_InputToInputWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));
        m_Layer->m_QuantizedLstmParameters.m_InputToForgetWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));
        m_Layer->m_QuantizedLstmParameters.m_InputToCellWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));
        m_Layer->m_QuantizedLstmParameters.m_InputToOutputWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));

        m_Layer->m_QuantizedLstmParameters.m_RecurrentToInputWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));
        m_Layer->m_QuantizedLstmParameters.m_RecurrentToForgetWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));
        m_Layer->m_QuantizedLstmParameters.m_RecurrentToCellWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));
        m_Layer->m_QuantizedLstmParameters.m_RecurrentToOutputWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));

        m_Layer->m_QuantizedLstmParameters.m_InputGateBias = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));
        m_Layer->m_QuantizedLstmParameters.m_ForgetGateBias = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));
        m_Layer->m_QuantizedLstmParameters.m_CellBias = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));
        m_Layer->m_QuantizedLstmParameters.m_OutputGateBias = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::QuantizedLstmLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::FullyConnectedLayer>
{
    DummyLayer()
    {
        armnn::FullyConnectedLayer::DescriptorType desc;
        m_Layer = dummyGraph.AddLayer<armnn::FullyConnectedLayer>(desc, "");
        m_Layer->m_Weight = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::FullyConnectedLayer* m_Layer;
};

// Tag for giving LayerType entries a unique strong type each.
template<armnn::LayerType>
struct Tag{};

#define DECLARE_LAYER_POLICY_CUSTOM_PARAM(name, descType) \
template<armnn::DataType DataType> \
struct LayerTypePolicy<armnn::LayerType::name, DataType> \
{ \
    using Type = armnn::name##Layer; \
    using Desc = descType; \
    using QueueDesc = armnn::name##QueueDescriptor; \
    constexpr static const char* NameStr = #name; \
    constexpr static const bool IsException = false; \
    \
    static std::unique_ptr<armnn::IWorkload> MakeDummyWorkload(armnn::IWorkloadFactory *factory, \
        unsigned int nIn, unsigned int nOut) \
    { \
        QueueDesc desc; \
        armnn::WorkloadInfo info = MakeDummyWorkloadInfo<DataType>(nIn, nOut); \
        return factory->Create##name(desc, info); \
    } \
};
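
// For reference, DECLARE_LAYER_POLICY_CUSTOM_PARAM(Input, armnn::LayerBindingId)
// expands to roughly the following specialization (sketch, not the exact
// preprocessor output):
//
//     template<armnn::DataType DataType>
//     struct LayerTypePolicy<armnn::LayerType::Input, DataType>
//     {
//         using Type = armnn::InputLayer;
//         using Desc = armnn::LayerBindingId;
//         using QueueDesc = armnn::InputQueueDescriptor;
//         constexpr static const char* NameStr = "Input";
//         ...
//     };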

#define DECLARE_LAYER_POLICY_MAP_PARAM(name, descType) \
template<armnn::DataType DataType> \
struct LayerTypePolicy<armnn::LayerType::name, DataType> \
{ \
    using Type = armnn::name##Layer; \
    using Desc = descType; \
    using QueueDesc = armnn::name##QueueDescriptor; \
    using Workload = armnn::name##Workload; \
    constexpr static const char* NameStr = #name; \
    constexpr static const bool IsException = false; \
    \
    static std::unique_ptr<armnn::IWorkload> MakeDummyWorkload(armnn::IWorkloadFactory* factory, \
        unsigned int nIn, unsigned int nOut) \
    { \
        IgnoreUnused(factory); \
        QueueDesc desc; \
        armnn::WorkloadInfo info = MakeDummyWorkloadInfo<DataType>(nIn, nOut); \
        return std::make_unique<armnn::name##Workload>(desc, info); \
    } \
};

// Define a layer policy specialization for use with the IsLayerSupported tests.
// Use this version for layers whose constructor takes 1 parameter (name).
#define DECLARE_LAYER_POLICY_1_PARAM(name) DECLARE_LAYER_POLICY_CUSTOM_PARAM(name, void)

// Define a layer policy specialization for use with the IsLayerSupported tests.
// Use this version for layers whose constructor takes 2 parameters (descriptor and name).
#define DECLARE_LAYER_POLICY_2_PARAM(name) DECLARE_LAYER_POLICY_CUSTOM_PARAM(name, armnn::name##Descriptor)

#define DECLARE_LAYER_POLICY_EXCEPTION(name, descType) \
template<armnn::DataType DataType> \
struct LayerTypePolicy<armnn::LayerType::name, DataType> \
{ \
    using Type = armnn::name##Layer; \
    using Desc = descType; \
    constexpr static const char* NameStr = #name; \
    constexpr static const bool IsException = true; \
    \
    static std::unique_ptr<armnn::IWorkload> MakeDummyWorkload(armnn::IWorkloadFactory *factory, \
        unsigned int nIn, unsigned int nOut) \
    { \
        IgnoreUnused(factory, nIn, nOut); \
        return std::unique_ptr<armnn::IWorkload>(); \
    } \
};

#define DECLARE_LAYER_POLICY_EXCEPTION_1_PARAM(name) DECLARE_LAYER_POLICY_EXCEPTION(name, void)
#define DECLARE_LAYER_POLICY_EXCEPTION_2_PARAM(name) DECLARE_LAYER_POLICY_EXCEPTION(name, armnn::name##Descriptor)

// Layer policy template.
template<armnn::LayerType Type, armnn::DataType DataType>
struct LayerTypePolicy;

// Every entry in the armnn::LayerType enum must be accounted for below.
DECLARE_LAYER_POLICY_2_PARAM(Activation)

DECLARE_LAYER_POLICY_1_PARAM(Addition)

DECLARE_LAYER_POLICY_2_PARAM(ArgMinMax)

DECLARE_LAYER_POLICY_2_PARAM(BatchNormalization)

DECLARE_LAYER_POLICY_2_PARAM(BatchToSpaceNd)

DECLARE_LAYER_POLICY_1_PARAM(Cast)

DECLARE_LAYER_POLICY_2_PARAM(Comparison)

DECLARE_LAYER_POLICY_2_PARAM(Concat)

DECLARE_LAYER_POLICY_1_PARAM(Constant)

DECLARE_LAYER_POLICY_1_PARAM(ConvertBf16ToFp32)

DECLARE_LAYER_POLICY_1_PARAM(ConvertFp16ToFp32)

DECLARE_LAYER_POLICY_1_PARAM(ConvertFp32ToBf16)

DECLARE_LAYER_POLICY_1_PARAM(ConvertFp32ToFp16)

DECLARE_LAYER_POLICY_2_PARAM(Convolution2d)

DECLARE_LAYER_POLICY_1_PARAM(MemCopy)

DECLARE_LAYER_POLICY_1_PARAM(MemImport)

DECLARE_LAYER_POLICY_1_PARAM(Debug)

DECLARE_LAYER_POLICY_2_PARAM(DepthToSpace)

DECLARE_LAYER_POLICY_2_PARAM(DepthwiseConvolution2d)

DECLARE_LAYER_POLICY_1_PARAM(Dequantize)

DECLARE_LAYER_POLICY_2_PARAM(DetectionPostProcess)

DECLARE_LAYER_POLICY_2_PARAM(ElementwiseUnary)

DECLARE_LAYER_POLICY_2_PARAM(FakeQuantization)

DECLARE_LAYER_POLICY_2_PARAM(Fill)

DECLARE_LAYER_POLICY_1_PARAM(Floor)

DECLARE_LAYER_POLICY_2_PARAM(FullyConnected)

DECLARE_LAYER_POLICY_2_PARAM(Gather)

DECLARE_LAYER_POLICY_CUSTOM_PARAM(Input, armnn::LayerBindingId)

DECLARE_LAYER_POLICY_2_PARAM(InstanceNormalization)

DECLARE_LAYER_POLICY_2_PARAM(L2Normalization)

DECLARE_LAYER_POLICY_2_PARAM(LogicalBinary)

DECLARE_LAYER_POLICY_2_PARAM(LogSoftmax)

DECLARE_LAYER_POLICY_2_PARAM(Lstm)

DECLARE_LAYER_POLICY_MAP_PARAM(Map, void)

DECLARE_LAYER_POLICY_1_PARAM(Maximum)

DECLARE_LAYER_POLICY_2_PARAM(Mean)

DECLARE_LAYER_POLICY_1_PARAM(Merge)

DECLARE_LAYER_POLICY_1_PARAM(Minimum)

DECLARE_LAYER_POLICY_1_PARAM(Multiplication)

DECLARE_LAYER_POLICY_2_PARAM(Normalization)

DECLARE_LAYER_POLICY_CUSTOM_PARAM(Output, armnn::LayerBindingId)

DECLARE_LAYER_POLICY_2_PARAM(Pad)

DECLARE_LAYER_POLICY_1_PARAM(Quantize)

DECLARE_LAYER_POLICY_2_PARAM(Permute)

DECLARE_LAYER_POLICY_2_PARAM(Pooling2d)

DECLARE_LAYER_POLICY_2_PARAM(PreCompiled)

DECLARE_LAYER_POLICY_1_PARAM(Prelu)

DECLARE_LAYER_POLICY_2_PARAM(QLstm)

DECLARE_LAYER_POLICY_1_PARAM(QuantizedLstm)

DECLARE_LAYER_POLICY_1_PARAM(Division)

DECLARE_LAYER_POLICY_1_PARAM(Rank)

DECLARE_LAYER_POLICY_2_PARAM(Resize)

DECLARE_LAYER_POLICY_2_PARAM(Reshape)

DECLARE_LAYER_POLICY_1_PARAM(Shape)

DECLARE_LAYER_POLICY_2_PARAM(Slice)

DECLARE_LAYER_POLICY_2_PARAM(Softmax)

DECLARE_LAYER_POLICY_2_PARAM(SpaceToBatchNd)

DECLARE_LAYER_POLICY_2_PARAM(SpaceToDepth)

DECLARE_LAYER_POLICY_2_PARAM(Splitter)

DECLARE_LAYER_POLICY_2_PARAM(Stack)

DECLARE_LAYER_POLICY_EXCEPTION_2_PARAM(StandIn)

DECLARE_LAYER_POLICY_2_PARAM(StridedSlice)

DECLARE_LAYER_POLICY_1_PARAM(Subtraction)

DECLARE_LAYER_POLICY_2_PARAM(Reduce)

DECLARE_LAYER_POLICY_1_PARAM(Switch)

DECLARE_LAYER_POLICY_2_PARAM(Transpose)

DECLARE_LAYER_POLICY_2_PARAM(TransposeConvolution2d)

DECLARE_LAYER_POLICY_MAP_PARAM(Unmap, void)

// Generic implementation to get the number of input slots for a given layer type.
template<armnn::LayerType Type>
unsigned int GetNumInputs(const armnn::Layer& layer)
{
    return layer.GetNumInputSlots();
}

// Generic implementation to get the number of output slots for a given layer type.
template<armnn::LayerType Type>
unsigned int GetNumOutputs(const armnn::Layer& layer)
{
    return layer.GetNumOutputSlots();
}

template<>
unsigned int GetNumInputs<armnn::LayerType::Concat>(const armnn::Layer& layer)
{
    IgnoreUnused(layer);
    return 2;
}

// Tests that the IsLayerSupported() function returns the correct value.
// We determine the correct value by *trying* to create the relevant workload and seeing if it matches what we expect.
// Returns true if expectations are met, otherwise returns false.
template<typename FactoryType, armnn::DataType DataType, armnn::LayerType Type>
bool IsLayerSupportedTest(FactoryType *factory, Tag<Type>)
{
    using LayerPolicy = LayerTypePolicy<Type, DataType>;
    using LayerType = typename LayerPolicy::Type;
    using LayerDesc = typename LayerPolicy::Desc;
    DummyLayer<LayerType, LayerDesc> layer;

    if (LayerPolicy::IsException) // Don't test exceptions to the rule.
    {
        return true;
    }

    unsigned int numIn = GetNumInputs<Type>(*layer.m_Layer);
    unsigned int numOut = GetNumOutputs<Type>(*layer.m_Layer);

    // Make another dummy layer just to make IsLayerSupported have valid inputs.
    DummyLayer<armnn::ConstantLayer, void> previousLayer;
    // Set output of the previous layer to a dummy tensor.
    armnn::TensorInfo output = MakeDummyTensorInfo<DataType>();
    previousLayer.m_Layer->GetOutputSlot(0).SetTensorInfo(output);
    // Connect all outputs of the previous layer to inputs of tested layer.
    for (unsigned int i = 0; i < numIn; i++)
    {
        armnn::IOutputSlot& previousLayerOutputSlot = previousLayer.m_Layer->GetOutputSlot(0);
        armnn::IInputSlot& layerInputSlot = layer.m_Layer->GetInputSlot(i);
        previousLayerOutputSlot.Connect(layerInputSlot);
    }
    // Set each output of the tested layer to a dummy tensor.
    for (unsigned int i = 0; i < numOut; i++)
    {
        layer.m_Layer->GetOutputSlot(i).SetTensorInfo(output);
    }

    std::string layerName = LayerPolicy::NameStr;
    std::string reasonIfUnsupported;
    if (FactoryType::IsLayerSupported(*layer.m_Layer, DataType, reasonIfUnsupported))
    {
        std::string errorMsg = " layer expected support but found none.";
        try
        {
            bool retVal = LayerPolicy::MakeDummyWorkload(factory, numIn, numOut).get() != nullptr;
            CHECK_MESSAGE(retVal, layerName << errorMsg);
            return retVal;
        }
        catch(const armnn::InvalidArgumentException& e)
        {
            IgnoreUnused(e);
            // This is ok since we throw InvalidArgumentException when creating the dummy workload.
            return true;
        }
        catch(const std::exception& e)
        {
            errorMsg = e.what();
            FAIL(layerName << ": " << errorMsg);
            return false;
        }
        catch(...)
        {
            errorMsg = "Unexpected error while testing support for ";
            FAIL(errorMsg << layerName);
            return false;
        }
    }
    else
    {
        std::string errorMsg = " layer expected no support (giving reason: " + reasonIfUnsupported + ") but found some.";
        try
        {
            bool retVal = LayerPolicy::MakeDummyWorkload(factory, numIn, numOut).get() == nullptr;
            CHECK_MESSAGE(retVal, layerName << errorMsg);
            return retVal;
        }
        // These two exceptions are ok: For workloads that are partially supported, attempting to instantiate them
        // using parameters that make IsLayerSupported() return false should throw an
        // InvalidArgumentException or UnimplementedException.
        catch(const armnn::InvalidArgumentException& e)
        {
            IgnoreUnused(e);
            return true;
        }
        catch(const armnn::UnimplementedException& e)
        {
            IgnoreUnused(e);
            return true;
        }
        catch(const std::exception& e)
        {
            errorMsg = e.what();
            FAIL(layerName << ": " << errorMsg);
            return false;
        }
        catch(...)
        {
            errorMsg = "Unexpected error while testing support for ";
            FAIL(errorMsg << layerName);
            return false;
        }
    }
}

template<typename FactoryType, armnn::DataType DataType, armnn::LayerType Type>
bool IsLayerSupportedTest(FactoryType *factory, Tag<armnn::LayerType::Map>)
{
    IgnoreUnused(factory);
    return true;
}

template<typename FactoryType, armnn::DataType DataType, armnn::LayerType Type>
bool IsLayerSupportedTest(FactoryType *factory, Tag<armnn::LayerType::Unmap>)
{
    IgnoreUnused(factory);
    return true;
}

// Helper function to compute the next type in the LayerType enum.
constexpr armnn::LayerType NextType(armnn::LayerType type)
{
    return static_cast<armnn::LayerType>(static_cast<int>(type)+1);
}

// Termination function for determining the end of the LayerType enumeration.
template<typename FactoryType, armnn::DataType DataType, armnn::LayerType Type>
bool IsLayerSupportedTestsImpl(FactoryType *factory, Tag<armnn::LayerType::LastLayer>)
{
    return IsLayerSupportedTest<FactoryType, DataType, Type>(factory, Tag<Type>());
}

// Recursive function to test an entry in the LayerType enum and then iterate on the next entry.
template<typename FactoryType, armnn::DataType DataType, armnn::LayerType Type>
bool IsLayerSupportedTestsImpl(FactoryType *factory, Tag<Type>)
{
    bool v = IsLayerSupportedTest<FactoryType, DataType, Type>(factory, Tag<Type>());

    return v &&
           IsLayerSupportedTestsImpl<FactoryType, DataType, NextType(Type)>
               (factory, Tag<NextType(Type)>());
}

// Helper function to pass through to the test framework.
template<typename FactoryType, armnn::DataType DataType>
bool IsLayerSupportedTests(FactoryType *factory)
{
    return IsLayerSupportedTestsImpl<FactoryType, DataType>(factory, Tag<armnn::LayerType::FirstLayer>());
}
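
// Example (sketch): a backend's doctest case would typically drive the whole
// LayerType enumeration in a single call. RefWorkloadFactory is used here
// purely as an illustration; any IWorkloadFactory implementation works:
//
//     TEST_CASE("IsLayerSupportedFloat32")
//     {
//         armnn::RefWorkloadFactory factory;
//         CHECK((IsLayerSupportedTests<armnn::RefWorkloadFactory, armnn::DataType::Float32>(&factory)));
//     }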

template<armnn::LayerType Type>
bool TestLayerTypeMatches()
{
    using LayerPolicy = LayerTypePolicy<Type, armnn::DataType::Float32>;
    using LayerType = typename LayerPolicy::Type;
    using LayerDesc = typename LayerPolicy::Desc;
    DummyLayer<LayerType, LayerDesc> layer;

    std::stringstream ss;
    ss << LayerPolicy::NameStr << " layer type mismatches expected layer type value.";
    bool v = Type == layer.m_Layer->GetType();
    CHECK_MESSAGE(v, ss.str());
    return v;
}

template<armnn::LayerType Type>
bool LayerTypeMatchesTestImpl(Tag<armnn::LayerType::LastLayer>)
{
    return TestLayerTypeMatches<Type>();
}

template<armnn::LayerType Type>
bool LayerTypeMatchesTestImpl(Tag<Type>)
{
    return TestLayerTypeMatches<Type>() &&
           LayerTypeMatchesTestImpl<NextType(Type)>(Tag<NextType(Type)>());
}
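
// Example (sketch): the whole enum can be checked from its first entry, in the
// same recursive style as IsLayerSupportedTestsImpl above:
//
//     bool allMatch = LayerTypeMatchesTestImpl<armnn::LayerType::FirstLayer>(
//         Tag<armnn::LayerType::FirstLayer>());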

template<typename FactoryType, typename LayerType, armnn::DataType InputDataType, armnn::DataType OutputDataType>
bool IsConvertLayerSupportedTests(std::string& reasonIfUnsupported)
{
    armnn::Graph graph;
    LayerType* const layer = graph.AddLayer<LayerType>("LayerName");

    armnn::Layer* const input = graph.AddLayer<armnn::InputLayer>(0, "input");
    armnn::Layer* const output = graph.AddLayer<armnn::OutputLayer>(0, "output");

    armnn::TensorInfo inputTensorInfo({1, 3, 2, 3}, InputDataType);
    armnn::TensorInfo outputTensorInfo({1, 3, 2, 3}, OutputDataType);

    input->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
    input->GetOutputHandler(0).SetTensorInfo(inputTensorInfo);
    layer->GetOutputSlot(0).Connect(output->GetInputSlot(0));
    layer->GetOutputHandler(0).SetTensorInfo(outputTensorInfo);

    bool result = FactoryType::IsLayerSupported(*layer, InputDataType, reasonIfUnsupported);

    return result;
}
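
// Example (sketch): checking Fp16 -> Fp32 conversion support, assuming a
// backend factory such as armnn::RefWorkloadFactory is available to the test:
//
//     std::string reason;
//     bool supported = IsConvertLayerSupportedTests<armnn::RefWorkloadFactory,
//                                                   armnn::ConvertFp16ToFp32Layer,
//                                                   armnn::DataType::Float16,
//                                                   armnn::DataType::Float32>(reason);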

template<typename FactoryType, armnn::DataType InputDataType, armnn::DataType OutputDataType>
bool IsLogicalBinaryLayerSupportedTests(std::string& reasonIfUnsupported)
{
    armnn::Graph graph;
    armnn::LogicalBinaryDescriptor desc(armnn::LogicalBinaryOperation::LogicalOr);

    armnn::Layer* const input0 = graph.AddLayer<armnn::InputLayer>(0, "input0");
    armnn::Layer* const input1 = graph.AddLayer<armnn::InputLayer>(1, "input1");

    armnn::Layer* const layer = graph.AddLayer<armnn::LogicalBinaryLayer>(desc, "logicalOrLayer");

    armnn::Layer* const output = graph.AddLayer<armnn::OutputLayer>(0, "output1");

    armnn::TensorInfo inputTensorInfo0({1, 1, 1, 4}, InputDataType);
    armnn::TensorInfo inputTensorInfo1({1, 1, 1, 4}, InputDataType);

    armnn::TensorInfo outputTensorInfo({1, 1, 1, 4}, OutputDataType);

    input0->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
    input1->GetOutputSlot(0).Connect(layer->GetInputSlot(1));

    input0->GetOutputHandler(0).SetTensorInfo(inputTensorInfo0);
    input1->GetOutputHandler(0).SetTensorInfo(inputTensorInfo1);

    layer->GetOutputSlot(0).Connect(output->GetInputSlot(0));
    layer->GetOutputHandler(0).SetTensorInfo(outputTensorInfo);

    bool result = FactoryType::IsLayerSupported(*layer, InputDataType, reasonIfUnsupported);

    return result;
}

template<typename FactoryType, armnn::DataType InputDataType, armnn::DataType OutputDataType>
bool IsLogicalBinaryLayerBroadcastSupportedTests(std::string& reasonIfUnsupported)
{
    armnn::Graph graph;
    armnn::LogicalBinaryDescriptor desc(armnn::LogicalBinaryOperation::LogicalAnd);

    armnn::Layer* const input0 = graph.AddLayer<armnn::InputLayer>(0, "input0");
    armnn::Layer* const input1 = graph.AddLayer<armnn::InputLayer>(1, "input1");

    armnn::Layer* const layer = graph.AddLayer<armnn::LogicalBinaryLayer>(desc, "logicalAndLayer");

    armnn::Layer* const output = graph.AddLayer<armnn::OutputLayer>(0, "output2");

    armnn::TensorInfo inputTensorInfo0({1, 1, 1, 4}, InputDataType);
    armnn::TensorInfo inputTensorInfo1({1, 1, 1, 1}, InputDataType);

    armnn::TensorInfo outputTensorInfo({1, 1, 1, 4}, OutputDataType);

    input0->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
    input1->GetOutputSlot(0).Connect(layer->GetInputSlot(1));

    input0->GetOutputHandler(0).SetTensorInfo(inputTensorInfo0);
    input1->GetOutputHandler(0).SetTensorInfo(inputTensorInfo1);

    layer->GetOutputSlot(0).Connect(output->GetInputSlot(0));
    layer->GetOutputHandler(0).SetTensorInfo(outputTensorInfo);

    bool result = FactoryType::IsLayerSupported(*layer, InputDataType, reasonIfUnsupported);

    return result;
}

template<typename FactoryType, armnn::DataType InputDataType, armnn::DataType OutputDataType>
bool IsMeanLayerSupportedTests(std::string& reasonIfUnsupported)
{
    armnn::Graph graph;
    static const std::vector<unsigned> axes = {1, 0};
    armnn::MeanDescriptor desc(axes, false);

    armnn::Layer* const layer = graph.AddLayer<armnn::MeanLayer>(desc, "LayerName");

    armnn::Layer* const input = graph.AddLayer<armnn::InputLayer>(0, "input");
    armnn::Layer* const output = graph.AddLayer<armnn::OutputLayer>(0, "output");

    armnn::TensorInfo inputTensorInfo({4, 3, 2}, InputDataType);
    armnn::TensorInfo outputTensorInfo({2}, OutputDataType);

    input->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
    input->GetOutputHandler(0).SetTensorInfo(inputTensorInfo);
    layer->GetOutputSlot(0).Connect(output->GetInputSlot(0));
    layer->GetOutputHandler(0).SetTensorInfo(outputTensorInfo);

    bool result = FactoryType::IsLayerSupported(*layer, InputDataType, reasonIfUnsupported);

    return result;
}

// Tests that IsMeanSupported fails when input tensor dimensions
// do not match output tensor dimensions when keepDims == true.
template<typename FactoryType, armnn::DataType InputDataType, armnn::DataType OutputDataType>
bool IsMeanLayerNotSupportedTests(std::string& reasonIfUnsupported)
{
    armnn::Graph graph;
    static const std::vector<unsigned> axes = {};
    // Set keepDims == true.
    armnn::MeanDescriptor desc(axes, true);

    armnn::Layer* const layer = graph.AddLayer<armnn::MeanLayer>(desc, "LayerName");

    armnn::Layer* const input = graph.AddLayer<armnn::InputLayer>(0, "input");
    armnn::Layer* const output = graph.AddLayer<armnn::OutputLayer>(0, "output");

    // Mismatching number of tensor dimensions.
    armnn::TensorInfo inputTensorInfo({1, 1, 1, 1}, InputDataType);
    armnn::TensorInfo outputTensorInfo({1, 1}, OutputDataType);

    input->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
    input->GetOutputHandler(0).SetTensorInfo(inputTensorInfo);
    layer->GetOutputSlot(0).Connect(output->GetInputSlot(0));
    layer->GetOutputHandler(0).SetTensorInfo(outputTensorInfo);

    bool result = FactoryType::IsLayerSupported(*layer, InputDataType, reasonIfUnsupported);

    return result;
}
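
// Example (sketch): the two Mean helpers are intended to be used as a pair; a
// backend that supports Mean is expected to pass the first and, because of the
// mismatched dimensions above, fail the second (an assumption that depends on
// the backend under test):
//
//     std::string reason;
//     bool ok  = IsMeanLayerSupportedTests<FactoryType, armnn::DataType::Float32,
//                                          armnn::DataType::Float32>(reason);
//     bool bad = IsMeanLayerNotSupportedTests<FactoryType, armnn::DataType::Float32,
//                                             armnn::DataType::Float32>(reason);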

template<typename FactoryType, armnn::DataType OutputDataType>
bool IsConstantLayerSupportedTests(std::string& reasonIfUnsupported)
{
    armnn::Graph graph;

    armnn::Layer* const layer = graph.AddLayer<armnn::ConstantLayer>("ConstantLayerName");
    armnn::Layer* const output = graph.AddLayer<armnn::OutputLayer>(0, "OutputLayerName");

    armnn::TensorInfo outputTensorInfo({1, 1}, OutputDataType);

    layer->GetOutputSlot(0).Connect(output->GetInputSlot(0));
    layer->GetOutputHandler(0).SetTensorInfo(outputTensorInfo);

    bool result = FactoryType::IsLayerSupported(*layer, OutputDataType, reasonIfUnsupported);

    return result;
}

} // namespace