//
// Copyright © 2017-2023 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//
#pragma once

#include <Graph.hpp>

#include <backendsCommon/MapWorkload.hpp>
#include <backendsCommon/UnmapWorkload.hpp>
#include <armnn/backends/WorkloadFactory.hpp>

#include <armnn/utility/IgnoreUnused.hpp>

#include <doctest/doctest.h>

namespace
{
armnn::Graph dummyGraph;

// Make a dummy TensorInfo object.
template<armnn::DataType DataType>
armnn::TensorInfo MakeDummyTensorInfo()
{
    return armnn::TensorInfo({2,2,2,2}, DataType, 1.0, 0);
}


// Make a dummy WorkloadInfo using a dummy TensorInfo.
template<armnn::DataType DataType>
armnn::WorkloadInfo MakeDummyWorkloadInfo(unsigned int numInputs, unsigned int numOutputs)
{
    armnn::WorkloadInfo info;

    for (unsigned int i = 0; i < numInputs; i++)
    {
        info.m_InputTensorInfos.push_back(MakeDummyTensorInfo<DataType>());
    }

    for (unsigned int o = 0; o < numOutputs; o++)
    {
        info.m_OutputTensorInfos.push_back(MakeDummyTensorInfo<DataType>());
    }

    return info;
}
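
// For illustration, a hypothetical call (not used elsewhere in this file):
// MakeDummyWorkloadInfo<armnn::DataType::Float32>(2, 1) returns a WorkloadInfo
// holding two input TensorInfos and one output TensorInfo, each of shape
// {2,2,2,2} as produced by MakeDummyTensorInfo above.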

// Template class to create a dummy layer (2 parameters).
template<typename LayerType, typename DescType = typename LayerType::DescriptorType>
struct DummyLayer
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<LayerType>(DescType(), "");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    LayerType* m_Layer;
};

// Template class to create a dummy layer (1 parameter).
template<typename LayerType>
struct DummyLayer<LayerType, void>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<LayerType>("");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    LayerType* m_Layer;
};
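
// For example, DummyLayer<armnn::SoftmaxLayer> resolves to the primary template
// and default-constructs a SoftmaxDescriptor, whereas DummyLayer<armnn::FloorLayer, void>
// selects the void specialisation because FloorLayer is constructed from just a
// name (see DECLARE_LAYER_POLICY_1_PARAM(Floor) below).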

template<>
struct DummyLayer<armnn::BatchNormalizationLayer>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::BatchNormalizationLayer>(armnn::BatchNormalizationDescriptor(), "");
        m_Layer->m_Mean = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_Variance = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_Beta = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_Gamma = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::BatchNormalizationLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::BatchToSpaceNdLayer>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::BatchToSpaceNdLayer>(armnn::BatchToSpaceNdDescriptor(), "");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::BatchToSpaceNdLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::ConstantLayer, void>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::ConstantLayer>("");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::ConstantLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::InputLayer, armnn::LayerBindingId>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::InputLayer>(armnn::LayerBindingId(), "");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::InputLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::ConcatLayer>
{
    DummyLayer()
    {
        armnn::OriginsDescriptor desc(2);
        m_Layer = dummyGraph.AddLayer<armnn::ConcatLayer>(desc, "");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::ConcatLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::MapLayer, void>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::MapLayer>("");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::MapLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::OutputLayer, armnn::LayerBindingId>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::OutputLayer>(armnn::LayerBindingId(), "");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::OutputLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::SplitterLayer>
{
    DummyLayer()
    {
        armnn::ViewsDescriptor desc(1);
        m_Layer = dummyGraph.AddLayer<armnn::SplitterLayer>(desc, "");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::SplitterLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::UnmapLayer, void>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::UnmapLayer>("");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::UnmapLayer* m_Layer;
};

template <typename ConvolutionLayerType>
struct DummyConvolutionLayer
{
    DummyConvolutionLayer()
    {
        typename ConvolutionLayerType::DescriptorType desc;
        desc.m_StrideX = 1;
        desc.m_StrideY = 1;
        m_Layer = dummyGraph.AddLayer<ConvolutionLayerType>(desc, "");
    }

    ~DummyConvolutionLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    ConvolutionLayerType* m_Layer;
};

template<>
struct DummyLayer<armnn::Convolution2dLayer>
    : public DummyConvolutionLayer<armnn::Convolution2dLayer>
{
};

template<>
struct DummyLayer<armnn::DepthwiseConvolution2dLayer>
    : public DummyConvolutionLayer<armnn::DepthwiseConvolution2dLayer>
{
};

// Note: when m_Weight and m_Bias are removed from TransposeConvolution2d, it can use DummyConvolutionLayer.
template <>
struct DummyLayer<armnn::TransposeConvolution2dLayer>
{
    DummyLayer()
    {
        typename armnn::TransposeConvolution2dLayer::DescriptorType desc;
        desc.m_StrideX = 1;
        desc.m_StrideY = 1;
        m_Layer = dummyGraph.AddLayer<armnn::TransposeConvolution2dLayer>(desc, "");
        m_Layer->m_Weight = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_Bias = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::TransposeConvolution2dLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::DetectionPostProcessLayer>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::DetectionPostProcessLayer>(armnn::DetectionPostProcessDescriptor(), "");
        m_Layer->m_Anchors = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::DetectionPostProcessLayer* m_Layer;
};

template <typename LstmLayerType>
struct DummyLstmLayer
{
    DummyLstmLayer()
    {
        typename LstmLayerType::DescriptorType desc;
        desc.m_CifgEnabled = false;

        m_Layer = dummyGraph.AddLayer<LstmLayerType>(desc, "");
        m_Layer->m_BasicParameters.m_InputToForgetWeights = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_InputToCellWeights = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_InputToOutputWeights = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_RecurrentToForgetWeights = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_RecurrentToCellWeights = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_RecurrentToOutputWeights = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_ForgetGateBias = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_CellBias = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_OutputGateBias = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));

        m_Layer->m_CifgParameters.m_InputToInputWeights = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_CifgParameters.m_RecurrentToInputWeights = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_CifgParameters.m_InputGateBias = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
    }

    ~DummyLstmLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::LstmLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::LstmLayer>
    : public DummyLstmLayer<armnn::LstmLayer>
{
};

template <typename UnidirectionalSequenceLstmLayerType>
struct DummyUnidirectionalSequenceLstmLayer
{
    DummyUnidirectionalSequenceLstmLayer()
    {
        typename UnidirectionalSequenceLstmLayerType::DescriptorType desc;
        desc.m_CifgEnabled = false;

        m_Layer = dummyGraph.AddLayer<UnidirectionalSequenceLstmLayerType>(desc, "");
        m_Layer->m_BasicParameters.m_InputToForgetWeights = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_InputToCellWeights = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_InputToOutputWeights = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_RecurrentToForgetWeights = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_RecurrentToCellWeights = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_RecurrentToOutputWeights = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_ForgetGateBias = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_CellBias = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_OutputGateBias = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));

        m_Layer->m_CifgParameters.m_InputToInputWeights = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_CifgParameters.m_RecurrentToInputWeights = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_CifgParameters.m_InputGateBias = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
    }

    ~DummyUnidirectionalSequenceLstmLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::UnidirectionalSequenceLstmLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::UnidirectionalSequenceLstmLayer>
    : public DummyUnidirectionalSequenceLstmLayer<armnn::UnidirectionalSequenceLstmLayer>
{
};

template<>
struct DummyLayer<armnn::QLstmLayer>
{
    DummyLayer()
    {
        armnn::QLstmLayer::DescriptorType desc;
        desc.m_CifgEnabled = false;
        desc.m_PeepholeEnabled = true;
        desc.m_ProjectionEnabled = true;
        desc.m_LayerNormEnabled = true;

        m_Layer = dummyGraph.AddLayer<armnn::QLstmLayer>(desc, "qLstm");

        // Basic params
        m_Layer->m_BasicParameters.m_InputToForgetWeights = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));
        m_Layer->m_BasicParameters.m_InputToCellWeights = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));
        m_Layer->m_BasicParameters.m_InputToOutputWeights = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));

        m_Layer->m_BasicParameters.m_RecurrentToForgetWeights = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));
        m_Layer->m_BasicParameters.m_RecurrentToCellWeights = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));
        m_Layer->m_BasicParameters.m_RecurrentToOutputWeights = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));

        m_Layer->m_BasicParameters.m_ForgetGateBias = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));
        m_Layer->m_BasicParameters.m_CellBias = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));
        m_Layer->m_BasicParameters.m_OutputGateBias = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));

        // CIFG optional params
        m_Layer->m_CifgParameters.m_InputToInputWeights = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));
        m_Layer->m_CifgParameters.m_RecurrentToInputWeights = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));
        m_Layer->m_CifgParameters.m_InputGateBias = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));

        // Projection optional params
        m_Layer->m_ProjectionParameters.m_ProjectionWeights = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));
        m_Layer->m_ProjectionParameters.m_ProjectionBias = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));

        // Peephole optional params
        m_Layer->m_PeepholeParameters.m_CellToInputWeights = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS16));
        m_Layer->m_PeepholeParameters.m_CellToForgetWeights = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS16));
        m_Layer->m_PeepholeParameters.m_CellToOutputWeights = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS16));

        // Layer normalization optional params
        m_Layer->m_LayerNormParameters.m_InputLayerNormWeights = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS16));
        m_Layer->m_LayerNormParameters.m_ForgetLayerNormWeights = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS16));
        m_Layer->m_LayerNormParameters.m_CellLayerNormWeights = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS16));
        m_Layer->m_LayerNormParameters.m_OutputLayerNormWeights = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS16));
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::QLstmLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::QuantizedLstmLayer, void>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::QuantizedLstmLayer>("");

        m_Layer->m_QuantizedLstmParameters.m_InputToInputWeights = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));
        m_Layer->m_QuantizedLstmParameters.m_InputToForgetWeights = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));
        m_Layer->m_QuantizedLstmParameters.m_InputToCellWeights = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));
        m_Layer->m_QuantizedLstmParameters.m_InputToOutputWeights = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));

        m_Layer->m_QuantizedLstmParameters.m_RecurrentToInputWeights = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));
        m_Layer->m_QuantizedLstmParameters.m_RecurrentToForgetWeights = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));
        m_Layer->m_QuantizedLstmParameters.m_RecurrentToCellWeights = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));
        m_Layer->m_QuantizedLstmParameters.m_RecurrentToOutputWeights = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));

        m_Layer->m_QuantizedLstmParameters.m_InputGateBias = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));
        m_Layer->m_QuantizedLstmParameters.m_ForgetGateBias = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));
        m_Layer->m_QuantizedLstmParameters.m_CellBias = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));
        m_Layer->m_QuantizedLstmParameters.m_OutputGateBias = std::make_unique<armnn::ScopedTensorHandle>(
                armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::QuantizedLstmLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::FullyConnectedLayer>
{
    DummyLayer()
    {
        armnn::FullyConnectedLayer::DescriptorType desc;
        m_Layer = dummyGraph.AddLayer<armnn::FullyConnectedLayer>(desc, "");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::FullyConnectedLayer* m_Layer;
};

// Tag for giving LayerType entries a unique strong type each.
template<armnn::LayerType>
struct Tag{};
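
// Tag dispatch example: Tag<armnn::LayerType::Map> and Tag<armnn::LayerType::Unmap>
// are distinct empty types, which is what lets the IsLayerSupportedTest overloads
// further down select Map/Unmap-specific behaviour purely at compile time.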

#define DECLARE_LAYER_POLICY_CUSTOM_PARAM(name, descType) \
template<armnn::DataType DataType> \
struct LayerTypePolicy<armnn::LayerType::name, DataType> \
{ \
    using Type = armnn::name##Layer; \
    using Desc = descType; \
    using QueueDesc = armnn::name##QueueDescriptor; \
    constexpr static const char* NameStr = #name; \
    constexpr static const bool IsException = false; \
    \
    static std::unique_ptr<armnn::IWorkload> MakeDummyWorkload(armnn::IWorkloadFactory *factory, \
        unsigned int nIn, unsigned int nOut) \
    { \
        QueueDesc desc; \
        armnn::WorkloadInfo info = MakeDummyWorkloadInfo<DataType>(nIn, nOut); \
        return factory->CreateWorkload(armnn::LayerType::name, desc, info); \
    } \
};
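
// As a sketch, DECLARE_LAYER_POLICY_CUSTOM_PARAM(Input, armnn::LayerBindingId)
// (used further down) expands to roughly:
//
//     template<armnn::DataType DataType>
//     struct LayerTypePolicy<armnn::LayerType::Input, DataType>
//     {
//         using Type = armnn::InputLayer;
//         using Desc = armnn::LayerBindingId;
//         using QueueDesc = armnn::InputQueueDescriptor;
//         ...
//     };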

#define DECLARE_LAYER_POLICY_MAP_PARAM(name, descType) \
template<armnn::DataType DataType> \
struct LayerTypePolicy<armnn::LayerType::name, DataType> \
{ \
    using Type = armnn::name##Layer; \
    using Desc = descType; \
    using QueueDesc = armnn::name##QueueDescriptor; \
    using Workload = armnn::name##Workload; \
    constexpr static const char* NameStr = #name; \
    constexpr static const bool IsException = false; \
    \
    static std::unique_ptr<armnn::IWorkload> MakeDummyWorkload(armnn::IWorkloadFactory* factory, \
        unsigned int nIn, unsigned int nOut) \
    { \
        IgnoreUnused(factory); \
        QueueDesc desc; \
        armnn::WorkloadInfo info = MakeDummyWorkloadInfo<DataType>(nIn, nOut); \
        return std::make_unique<armnn::name##Workload>(desc, info); \
    } \
};

// Define a layer policy specialization for use with the IsLayerSupported tests.
// Use this version for layers whose constructor takes 1 parameter (name).
#define DECLARE_LAYER_POLICY_1_PARAM(name) DECLARE_LAYER_POLICY_CUSTOM_PARAM(name, void)

// Define a layer policy specialization for use with the IsLayerSupported tests.
// Use this version for layers whose constructor takes 2 parameters (descriptor and name).
#define DECLARE_LAYER_POLICY_2_PARAM(name) DECLARE_LAYER_POLICY_CUSTOM_PARAM(name, armnn::name##Descriptor)


#define DECLARE_LAYER_POLICY_EXCEPTION(name, descType) \
template<armnn::DataType DataType> \
struct LayerTypePolicy<armnn::LayerType::name, DataType> \
{ \
    using Type = armnn::name##Layer; \
    using Desc = descType; \
    constexpr static const char* NameStr = #name; \
    constexpr static const bool IsException = true; \
    \
    static std::unique_ptr<armnn::IWorkload> MakeDummyWorkload(armnn::IWorkloadFactory *factory, \
        unsigned int nIn, unsigned int nOut) \
    { \
        IgnoreUnused(factory, nIn, nOut); \
        return std::unique_ptr<armnn::IWorkload>(); \
    } \
};

#define DECLARE_LAYER_POLICY_EXCEPTION_1_PARAM(name) DECLARE_LAYER_POLICY_EXCEPTION(name, void)
#define DECLARE_LAYER_POLICY_EXCEPTION_2_PARAM(name) DECLARE_LAYER_POLICY_EXCEPTION(name, armnn::name##Descriptor)
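
// In this file the exception policy is only used for StandIn (see
// DECLARE_LAYER_POLICY_EXCEPTION_2_PARAM(StandIn) below): it has no workload to
// create, so MakeDummyWorkload returns an empty pointer and IsException makes
// IsLayerSupportedTest skip the layer entirely.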

// Layer policy template.
template<armnn::LayerType Type, armnn::DataType DataType>
struct LayerTypePolicy;

// Every entry in the armnn::LayerType enum must be accounted for below.
DECLARE_LAYER_POLICY_2_PARAM(Activation)

ARMNN_NO_DEPRECATE_WARN_BEGIN
DECLARE_LAYER_POLICY_1_PARAM(Addition)
ARMNN_NO_DEPRECATE_WARN_END

DECLARE_LAYER_POLICY_2_PARAM(ArgMinMax)

DECLARE_LAYER_POLICY_2_PARAM(BatchMatMul)

DECLARE_LAYER_POLICY_2_PARAM(BatchNormalization)

DECLARE_LAYER_POLICY_2_PARAM(BatchToSpaceNd)

DECLARE_LAYER_POLICY_2_PARAM(BroadcastTo)

DECLARE_LAYER_POLICY_1_PARAM(Cast)

DECLARE_LAYER_POLICY_2_PARAM(ChannelShuffle)

DECLARE_LAYER_POLICY_2_PARAM(Comparison)

DECLARE_LAYER_POLICY_2_PARAM(Concat)

DECLARE_LAYER_POLICY_1_PARAM(Constant)

DECLARE_LAYER_POLICY_1_PARAM(ConvertFp16ToFp32)

DECLARE_LAYER_POLICY_1_PARAM(ConvertFp32ToFp16)

DECLARE_LAYER_POLICY_2_PARAM(Convolution2d)

DECLARE_LAYER_POLICY_2_PARAM(Convolution3d)

DECLARE_LAYER_POLICY_1_PARAM(MemCopy)

DECLARE_LAYER_POLICY_1_PARAM(MemImport)

DECLARE_LAYER_POLICY_1_PARAM(Debug)

DECLARE_LAYER_POLICY_2_PARAM(DepthToSpace)

DECLARE_LAYER_POLICY_2_PARAM(DepthwiseConvolution2d)

DECLARE_LAYER_POLICY_1_PARAM(Dequantize)

DECLARE_LAYER_POLICY_2_PARAM(DetectionPostProcess)

DECLARE_LAYER_POLICY_2_PARAM(ElementwiseBinary)

DECLARE_LAYER_POLICY_2_PARAM(ElementwiseUnary)

DECLARE_LAYER_POLICY_2_PARAM(FakeQuantization)

DECLARE_LAYER_POLICY_2_PARAM(Fill)

DECLARE_LAYER_POLICY_1_PARAM(Floor)

DECLARE_LAYER_POLICY_2_PARAM(FullyConnected)

DECLARE_LAYER_POLICY_2_PARAM(Fused)

DECLARE_LAYER_POLICY_2_PARAM(Gather)

DECLARE_LAYER_POLICY_1_PARAM(GatherNd)

DECLARE_LAYER_POLICY_CUSTOM_PARAM(Input, armnn::LayerBindingId)

DECLARE_LAYER_POLICY_2_PARAM(InstanceNormalization)

DECLARE_LAYER_POLICY_2_PARAM(L2Normalization)

DECLARE_LAYER_POLICY_2_PARAM(LogicalBinary)

DECLARE_LAYER_POLICY_2_PARAM(LogSoftmax)

DECLARE_LAYER_POLICY_2_PARAM(Lstm)

DECLARE_LAYER_POLICY_MAP_PARAM(Map, void)

ARMNN_NO_DEPRECATE_WARN_BEGIN
DECLARE_LAYER_POLICY_1_PARAM(Maximum)
ARMNN_NO_DEPRECATE_WARN_END

DECLARE_LAYER_POLICY_2_PARAM(Mean)

DECLARE_LAYER_POLICY_1_PARAM(Merge)

ARMNN_NO_DEPRECATE_WARN_BEGIN
DECLARE_LAYER_POLICY_1_PARAM(Minimum)
ARMNN_NO_DEPRECATE_WARN_END

ARMNN_NO_DEPRECATE_WARN_BEGIN
DECLARE_LAYER_POLICY_1_PARAM(Multiplication)
ARMNN_NO_DEPRECATE_WARN_END

DECLARE_LAYER_POLICY_2_PARAM(Normalization)

DECLARE_LAYER_POLICY_CUSTOM_PARAM(Output, armnn::LayerBindingId)

DECLARE_LAYER_POLICY_2_PARAM(Pad)

DECLARE_LAYER_POLICY_1_PARAM(Quantize)

DECLARE_LAYER_POLICY_2_PARAM(Permute)

DECLARE_LAYER_POLICY_2_PARAM(Pooling2d)

DECLARE_LAYER_POLICY_2_PARAM(Pooling3d)

DECLARE_LAYER_POLICY_2_PARAM(PreCompiled)

DECLARE_LAYER_POLICY_1_PARAM(Prelu)

DECLARE_LAYER_POLICY_2_PARAM(QLstm)

DECLARE_LAYER_POLICY_1_PARAM(QuantizedLstm)

ARMNN_NO_DEPRECATE_WARN_BEGIN
DECLARE_LAYER_POLICY_1_PARAM(Division)
ARMNN_NO_DEPRECATE_WARN_END

DECLARE_LAYER_POLICY_1_PARAM(Rank)

DECLARE_LAYER_POLICY_2_PARAM(Reduce)

DECLARE_LAYER_POLICY_2_PARAM(Resize)

DECLARE_LAYER_POLICY_2_PARAM(Reshape)

DECLARE_LAYER_POLICY_1_PARAM(ReverseV2)

DECLARE_LAYER_POLICY_1_PARAM(Shape)

DECLARE_LAYER_POLICY_2_PARAM(Slice)

DECLARE_LAYER_POLICY_2_PARAM(Softmax)

DECLARE_LAYER_POLICY_2_PARAM(SpaceToBatchNd)

DECLARE_LAYER_POLICY_2_PARAM(SpaceToDepth)

DECLARE_LAYER_POLICY_2_PARAM(Splitter)

DECLARE_LAYER_POLICY_2_PARAM(Stack)

DECLARE_LAYER_POLICY_EXCEPTION_2_PARAM(StandIn)

DECLARE_LAYER_POLICY_2_PARAM(StridedSlice)

ARMNN_NO_DEPRECATE_WARN_BEGIN
DECLARE_LAYER_POLICY_1_PARAM(Subtraction)
ARMNN_NO_DEPRECATE_WARN_END

DECLARE_LAYER_POLICY_1_PARAM(Switch)

DECLARE_LAYER_POLICY_2_PARAM(Tile)

DECLARE_LAYER_POLICY_2_PARAM(Transpose)

DECLARE_LAYER_POLICY_2_PARAM(TransposeConvolution2d)

DECLARE_LAYER_POLICY_2_PARAM(UnidirectionalSequenceLstm)

DECLARE_LAYER_POLICY_MAP_PARAM(Unmap, void)

// Generic implementation to get the number of input slots for a given layer type.
template<armnn::LayerType Type>
unsigned int GetNumInputs(const armnn::Layer& layer)
{
    return layer.GetNumInputSlots();
}

// Generic implementation to get the number of output slots for a given layer type.
template<armnn::LayerType Type>
unsigned int GetNumOutputs(const armnn::Layer& layer)
{
    return layer.GetNumOutputSlots();
}

// Tests that the IsLayerSupported() function returns the correct value.
// The correct value is determined by attempting to create the relevant workload
// and checking that the outcome matches what IsLayerSupported() reported.
// Returns true if expectations are met, otherwise returns false.
template<typename FactoryType, armnn::DataType DataType, armnn::LayerType Type>
bool IsLayerSupportedTest(FactoryType *factory, Tag<Type>)
{
    using LayerPolicy = LayerTypePolicy<Type, DataType>;
    using LayerType = typename LayerPolicy::Type;
    using LayerDesc = typename LayerPolicy::Desc;
    DummyLayer<LayerType, LayerDesc> layer;

    if (LayerPolicy::IsException) // Don't test exceptions to the rule.
    {
        return true;
    }

    unsigned int numIn = GetNumInputs<Type>(*layer.m_Layer);
    unsigned int numOut = GetNumOutputs<Type>(*layer.m_Layer);

    // Make another dummy layer just to make IsLayerSupported have valid inputs.
    DummyLayer<armnn::ConstantLayer, void> previousLayer;
    // Set output of the previous layer to a dummy tensor.
    armnn::TensorInfo output = MakeDummyTensorInfo<DataType>();
    previousLayer.m_Layer->GetOutputSlot(0).SetTensorInfo(output);
    // Connect all outputs of the previous layer to inputs of tested layer.
    for (unsigned int i = 0; i < numIn; ++i)
    {
        armnn::IOutputSlot& previousLayerOutputSlot = previousLayer.m_Layer->GetOutputSlot(0);
        armnn::IInputSlot& layerInputSlot = layer.m_Layer->GetInputSlot(i);
        previousLayerOutputSlot.Connect(layerInputSlot);
    }
    // Set outputs of tested layer to a dummy tensor.
    for (unsigned int i = 0; i < numOut; ++i)
    {
        layer.m_Layer->GetOutputSlot(i).SetTensorInfo(output);
    }

    std::string layerName = LayerPolicy::NameStr;
    std::string reasonIfUnsupported;
    if (FactoryType::IsLayerSupported(*layer.m_Layer, DataType, reasonIfUnsupported))
    {
        std::string errorMsg = " layer expected support but found none.";
        try
        {
            bool retVal = LayerPolicy::MakeDummyWorkload(factory, numIn, numOut).get() != nullptr;
            CHECK_MESSAGE(retVal, layerName << errorMsg);
            return retVal;
        }
        catch(const armnn::InvalidArgumentException& e)
        {
            IgnoreUnused(e);
            // This is ok since we throw InvalidArgumentException when creating the dummy workload.
            return true;
        }
        catch(const std::exception& e)
        {
            errorMsg = e.what();
            FAIL(layerName << ": " << errorMsg);
            return false;
        }
        catch(...)
        {
            errorMsg = "Unexpected error while testing support for ";
            FAIL(errorMsg << layerName);
            return false;
        }
    }
    else
    {
        std::string errorMsg = "layer expected no support (giving reason: " + reasonIfUnsupported + ") but found some.";
        try
        {
            bool retVal = LayerPolicy::MakeDummyWorkload(factory, numIn, numOut).get() == nullptr;
            CHECK_MESSAGE(retVal, layerName << errorMsg);
            return retVal;
        }
        // These two exceptions are ok: For workloads that are partially supported, attempting to instantiate them
        // using parameters that make IsLayerSupported() return false should throw an
        // InvalidArgumentException or UnimplementedException.
        catch(const armnn::InvalidArgumentException& e)
        {
            IgnoreUnused(e);
            return true;
        }
        catch(const armnn::UnimplementedException& e)
        {
            IgnoreUnused(e);
            return true;
        }
        catch(const std::exception& e)
        {
            errorMsg = e.what();
            FAIL(layerName << ": " << errorMsg);
            return false;
        }
        catch(...)
        {
            errorMsg = "Unexpected error while testing support for ";
            FAIL(errorMsg << layerName);
            return false;
        }
    }
}

template<typename FactoryType, armnn::DataType DataType, armnn::LayerType Type>
bool IsLayerSupportedTest(FactoryType *factory, Tag<armnn::LayerType::Map>)
{
    IgnoreUnused(factory);
    return true;
}

template<typename FactoryType, armnn::DataType DataType, armnn::LayerType Type>
bool IsLayerSupportedTest(FactoryType *factory, Tag<armnn::LayerType::Unmap>)
{
    IgnoreUnused(factory);
    return true;
}

// Helper function to compute the next type in the LayerType enum.
constexpr armnn::LayerType NextType(armnn::LayerType type)
{
    return static_cast<armnn::LayerType>(static_cast<int>(type)+1);
}
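
// For example, if Activation immediately precedes Addition in the LayerType enum,
// NextType(armnn::LayerType::Activation) == armnn::LayerType::Addition. This is
// what lets the recursive templates below visit every enum entry in turn.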

// Termination function for determining the end of the LayerType enumeration.
template<typename FactoryType, armnn::DataType DataType, armnn::LayerType Type>
bool IsLayerSupportedTestsImpl(FactoryType *factory, Tag<armnn::LayerType::LastLayer>)
{
    return IsLayerSupportedTest<FactoryType, DataType, Type>(factory, Tag<Type>());
}

// Recursive function to test an entry in the LayerType enum and then iterate on the next entry.
template<typename FactoryType, armnn::DataType DataType, armnn::LayerType Type>
bool IsLayerSupportedTestsImpl(FactoryType *factory, Tag<Type>)
{
    bool v = IsLayerSupportedTest<FactoryType, DataType, Type>(factory, Tag<Type>());

    return v &&
           IsLayerSupportedTestsImpl<FactoryType, DataType, NextType(Type)>
           (factory, Tag<NextType(Type)>());
}

// Helper function to pass through to the test framework.
template<typename FactoryType, armnn::DataType DataType>
bool IsLayerSupportedTests(FactoryType *factory)
{
    return IsLayerSupportedTestsImpl<FactoryType, DataType>(factory, Tag<armnn::LayerType::FirstLayer>());
}
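
// A backend's unit tests would typically drive this roughly as follows
// (hypothetical call site; the factory type depends on the backend under test):
//
//     armnn::RefWorkloadFactory factory;
//     bool supported = IsLayerSupportedTests<armnn::RefWorkloadFactory,
//                                            armnn::DataType::Float32>(&factory);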

template<armnn::LayerType Type>
bool TestLayerTypeMatches()
{
    using LayerPolicy = LayerTypePolicy<Type, armnn::DataType::Float32>;
    using LayerType = typename LayerPolicy::Type;
    using LayerDesc = typename LayerPolicy::Desc;
    DummyLayer<LayerType, LayerDesc> layer;

    std::stringstream ss;
    ss << LayerPolicy::NameStr << " layer type mismatches expected layer type value.";
    bool v = Type == layer.m_Layer->GetType();
    CHECK_MESSAGE(v, ss.str());
    return v;
}

template<armnn::LayerType Type>
bool LayerTypeMatchesTestImpl(Tag<armnn::LayerType::LastLayer>)
{
    return TestLayerTypeMatches<Type>();
}

template<armnn::LayerType Type>
bool LayerTypeMatchesTestImpl(Tag<Type>)
{
    return TestLayerTypeMatches<Type>() &&
           LayerTypeMatchesTestImpl<NextType(Type)>(Tag<NextType(Type)>());
}

template<typename FactoryType, typename LayerType, armnn::DataType InputDataType, armnn::DataType OutputDataType>
bool IsConvertLayerSupportedTests(std::string& reasonIfUnsupported)
{
    armnn::Graph graph;
    LayerType* const layer = graph.AddLayer<LayerType>("LayerName");

    armnn::Layer* const input = graph.AddLayer<armnn::InputLayer>(0, "input");
    armnn::Layer* const output = graph.AddLayer<armnn::OutputLayer>(0, "output");

    armnn::TensorInfo inputTensorInfo({1, 3, 2, 3}, InputDataType);
    armnn::TensorInfo outputTensorInfo({1, 3, 2, 3}, OutputDataType);

    input->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
    input->GetOutputHandler(0).SetTensorInfo(inputTensorInfo);
    layer->GetOutputSlot(0).Connect(output->GetInputSlot(0));
    layer->GetOutputHandler(0).SetTensorInfo(outputTensorInfo);

    bool result = FactoryType::IsLayerSupported(*layer, InputDataType, reasonIfUnsupported);

    return result;
}
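
// A hypothetical invocation, checking Fp16 -> Fp32 conversion support on some
// FactoryType (the layer and data types below are taken from the policies above):
//
//     std::string reason;
//     bool ok = IsConvertLayerSupportedTests<FactoryType,
//                                            armnn::ConvertFp16ToFp32Layer,
//                                            armnn::DataType::Float16,
//                                            armnn::DataType::Float32>(reason);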

template<typename FactoryType, armnn::DataType InputDataType, armnn::DataType OutputDataType>
bool IsLogicalBinaryLayerSupportedTests(std::string& reasonIfUnsupported)
{
    armnn::Graph graph;
    armnn::LogicalBinaryDescriptor desc(armnn::LogicalBinaryOperation::LogicalOr);

    armnn::Layer* const input0 = graph.AddLayer<armnn::InputLayer>(0, "input0");
    armnn::Layer* const input1 = graph.AddLayer<armnn::InputLayer>(1, "input1");

    armnn::Layer* const layer = graph.AddLayer<armnn::LogicalBinaryLayer>(desc, "logicalOrLayer");

    armnn::Layer* const output = graph.AddLayer<armnn::OutputLayer>(0, "output1");

    armnn::TensorInfo inputTensorInfo0({1, 1, 1, 4}, InputDataType);
    armnn::TensorInfo inputTensorInfo1({1, 1, 1, 4}, InputDataType);

    armnn::TensorInfo outputTensorInfo({1, 1, 1, 4}, OutputDataType);

    input0->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
    input1->GetOutputSlot(0).Connect(layer->GetInputSlot(1));

    input0->GetOutputHandler(0).SetTensorInfo(inputTensorInfo0);
    input1->GetOutputHandler(0).SetTensorInfo(inputTensorInfo1);

    layer->GetOutputSlot(0).Connect(output->GetInputSlot(0));
    layer->GetOutputHandler(0).SetTensorInfo(outputTensorInfo);

    bool result = FactoryType::IsLayerSupported(*layer, InputDataType, reasonIfUnsupported);

    return result;
}

template<typename FactoryType, armnn::DataType InputDataType, armnn::DataType OutputDataType>
bool IsLogicalBinaryLayerBroadcastSupportedTests(std::string& reasonIfUnsupported)
{
    armnn::Graph graph;
    armnn::LogicalBinaryDescriptor desc(armnn::LogicalBinaryOperation::LogicalAnd);

    armnn::Layer* const input0 = graph.AddLayer<armnn::InputLayer>(0, "input0");
    armnn::Layer* const input1 = graph.AddLayer<armnn::InputLayer>(1, "input1");

    armnn::Layer* const layer = graph.AddLayer<armnn::LogicalBinaryLayer>(desc, "logicalAndLayer");

    armnn::Layer* const output = graph.AddLayer<armnn::OutputLayer>(0, "output2");

    armnn::TensorInfo inputTensorInfo0({1, 1, 1, 4}, InputDataType);
    armnn::TensorInfo inputTensorInfo1({1, 1, 1, 1}, InputDataType);

    armnn::TensorInfo outputTensorInfo({1, 1, 1, 4}, OutputDataType);

    input0->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
    input1->GetOutputSlot(0).Connect(layer->GetInputSlot(1));

    input0->GetOutputHandler(0).SetTensorInfo(inputTensorInfo0);
    input1->GetOutputHandler(0).SetTensorInfo(inputTensorInfo1);

    layer->GetOutputSlot(0).Connect(output->GetInputSlot(0));
    layer->GetOutputHandler(0).SetTensorInfo(outputTensorInfo);

    bool result = FactoryType::IsLayerSupported(*layer, InputDataType, reasonIfUnsupported);

    return result;
}

template<typename FactoryType, armnn::DataType InputDataType, armnn::DataType OutputDataType>
bool IsMeanLayerSupportedTests(std::string& reasonIfUnsupported)
{
    armnn::Graph graph;
    static const std::vector<unsigned> axes = {1, 0};
    armnn::MeanDescriptor desc(axes, false);

    armnn::Layer* const layer = graph.AddLayer<armnn::MeanLayer>(desc, "LayerName");

    armnn::Layer* const input = graph.AddLayer<armnn::InputLayer>(0, "input");
    armnn::Layer* const output = graph.AddLayer<armnn::OutputLayer>(0, "output");

    armnn::TensorInfo inputTensorInfo({4, 3, 2}, InputDataType);
    armnn::TensorInfo outputTensorInfo({2}, OutputDataType);

    input->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
    input->GetOutputHandler(0).SetTensorInfo(inputTensorInfo);
    layer->GetOutputSlot(0).Connect(output->GetInputSlot(0));
    layer->GetOutputHandler(0).SetTensorInfo(outputTensorInfo);

    bool result = FactoryType::IsLayerSupported(*layer, InputDataType, reasonIfUnsupported);

    return result;
}

// Tests that IsMeanSupported fails when input tensor dimensions
// do not match output tensor dimensions when keepDims == true.
template<typename FactoryType, armnn::DataType InputDataType, armnn::DataType OutputDataType>
bool IsMeanLayerNotSupportedTests(std::string& reasonIfUnsupported)
{
    armnn::Graph graph;
    static const std::vector<unsigned> axes = {};
    // Set keepDims == true.
    armnn::MeanDescriptor desc(axes, true);

    armnn::Layer* const layer = graph.AddLayer<armnn::MeanLayer>(desc, "LayerName");

    armnn::Layer* const input = graph.AddLayer<armnn::InputLayer>(0, "input");
    armnn::Layer* const output = graph.AddLayer<armnn::OutputLayer>(0, "output");

    // Mismatching number of tensor dimensions.
    armnn::TensorInfo inputTensorInfo({1, 1, 1, 1}, InputDataType);
    armnn::TensorInfo outputTensorInfo({1, 1}, OutputDataType);

    input->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
    input->GetOutputHandler(0).SetTensorInfo(inputTensorInfo);
    layer->GetOutputSlot(0).Connect(output->GetInputSlot(0));
    layer->GetOutputHandler(0).SetTensorInfo(outputTensorInfo);

    bool result = FactoryType::IsLayerSupported(*layer, InputDataType, reasonIfUnsupported);

    return result;
}

template<typename FactoryType, armnn::DataType OutputDataType>
bool IsConstantLayerSupportedTests(std::string& reasonIfUnsupported)
{
    armnn::Graph graph;

    armnn::Layer* const layer = graph.AddLayer<armnn::ConstantLayer>("ConstantLayerName");
    armnn::Layer* const output = graph.AddLayer<armnn::OutputLayer>(0, "OutputLayerName");

    armnn::TensorInfo outputTensorInfo({1, 1}, OutputDataType);

    layer->GetOutputSlot(0).Connect(output->GetInputSlot(0));
    layer->GetOutputHandler(0).SetTensorInfo(outputTensorInfo);

    bool result = FactoryType::IsLayerSupported(*layer, OutputDataType, reasonIfUnsupported);

    return result;
}

} // anonymous namespace