//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//
#pragma once

#include "Graph.hpp"

#include <boost/core/ignore_unused.hpp>

namespace
{
armnn::Graph dummyGraph;

// Make a dummy TensorInfo object.
template<armnn::DataType DataType>
armnn::TensorInfo MakeDummyTensorInfo()
{
    return armnn::TensorInfo({2,2,2,2}, DataType);
}

// Make a dummy WorkloadInfo using a dummy TensorInfo.
template<armnn::DataType DataType>
armnn::WorkloadInfo MakeDummyWorkloadInfo(unsigned int numInputs, unsigned int numOutputs)
{
    armnn::WorkloadInfo info;
    for (unsigned int i = 0; i < numInputs; i++)
    {
        info.m_InputTensorInfos.push_back(MakeDummyTensorInfo<DataType>());
    }
    for (unsigned int o = 0; o < numOutputs; o++)
    {
        info.m_OutputTensorInfos.push_back(MakeDummyTensorInfo<DataType>());
    }
    return info;
}
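
// For example, MakeDummyWorkloadInfo<armnn::DataType::Float32>(2, 1) returns a WorkloadInfo
// holding two {2,2,2,2} Float32 entries in m_InputTensorInfos and one in m_OutputTensorInfos.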

// Template class to create a dummy layer (2 parameters).
template<typename LayerType, typename DescType = typename LayerType::DescriptorType>
struct DummyLayer
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<LayerType>(DescType(), "");
    }
    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }
    LayerType* m_Layer;
};
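
// For example, DummyLayer<armnn::SoftmaxLayer> adds a SoftmaxLayer to dummyGraph with a
// default-constructed armnn::SoftmaxDescriptor, and erases it again when the DummyLayer is
// destroyed.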

// Template class to create a dummy layer (1 parameter).
template<typename LayerType>
struct DummyLayer<LayerType, void>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<LayerType>("");
    }
    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }
    LayerType* m_Layer;
};

template<>
struct DummyLayer<armnn::BatchNormalizationLayer>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::BatchNormalizationLayer>(armnn::BatchNormalizationDescriptor(), "");
        m_Layer->m_Mean = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_Variance = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_Beta = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_Gamma = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
    }
    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }
    armnn::BatchNormalizationLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::ConstantLayer, void>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::ConstantLayer>("");
    }
    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }
    armnn::ConstantLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::InputLayer, armnn::LayerBindingId>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::InputLayer>(armnn::LayerBindingId(), "");
    }
    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }
    armnn::InputLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::MergerLayer>
{
    DummyLayer()
    {
        armnn::OriginsDescriptor desc(2);
        m_Layer = dummyGraph.AddLayer<armnn::MergerLayer>(desc, "");
    }
    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }
    armnn::MergerLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::OutputLayer, armnn::LayerBindingId>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::OutputLayer>(armnn::LayerBindingId(), "");
    }
    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }
    armnn::OutputLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::SplitterLayer>
{
    DummyLayer()
    {
        armnn::ViewsDescriptor desc(1);
        m_Layer = dummyGraph.AddLayer<armnn::SplitterLayer>(desc, "");
    }
    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }
    armnn::SplitterLayer* m_Layer;
};

template <typename ConvolutionLayerType>
struct DummyConvolutionLayer
{
    DummyConvolutionLayer()
    {
        typename ConvolutionLayerType::DescriptorType desc;
        m_Layer = dummyGraph.AddLayer<ConvolutionLayerType>(desc, "");
        m_Layer->m_Weight = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_Bias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
    }
    ~DummyConvolutionLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }
    ConvolutionLayerType* m_Layer;
};

template<>
struct DummyLayer<armnn::Convolution2dLayer>
    : public DummyConvolutionLayer<armnn::Convolution2dLayer>
{
};

template<>
struct DummyLayer<armnn::DepthwiseConvolution2dLayer>
    : public DummyConvolutionLayer<armnn::DepthwiseConvolution2dLayer>
{
};
template <typename LstmLayerType>
struct DummyLstmLayer
{
    DummyLstmLayer()
    {
        typename LstmLayerType::DescriptorType desc;
        desc.m_CifgEnabled = false;

        m_Layer = dummyGraph.AddLayer<LstmLayerType>(desc, "");
        m_Layer->m_BasicParameters.m_InputToForgetWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_InputToCellWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_InputToOutputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_RecurrentToForgetWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_RecurrentToCellWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_RecurrentToOutputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_ForgetGateBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_CellBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_OutputGateBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));

        m_Layer->m_CifgParameters.m_InputToInputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_CifgParameters.m_RecurrentToInputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_CifgParameters.m_CellToInputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_CifgParameters.m_InputGateBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
    }
    ~DummyLstmLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }
    armnn::LstmLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::LstmLayer>
    : public DummyLstmLayer<armnn::LstmLayer>
{
};

template<>
struct DummyLayer<armnn::FullyConnectedLayer>
{
    DummyLayer()
    {
        armnn::FullyConnectedLayer::DescriptorType desc;
        m_Layer = dummyGraph.AddLayer<armnn::FullyConnectedLayer>(desc, "");
        m_Layer->m_Weight = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
    }
    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }
    armnn::FullyConnectedLayer* m_Layer;
};

// Tag for giving each LayerType entry its own unique strong type.
template<armnn::LayerType>
struct Tag{};
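
// For example, Tag<armnn::LayerType::Activation> and Tag<armnn::LayerType::Addition> are two
// distinct types, which lets the recursive helpers below select an overload per enum entry at
// compile time.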

#define DECLARE_LAYER_POLICY_CUSTOM_PARAM(name, descType) \
template<armnn::DataType DataType> \
struct LayerTypePolicy<armnn::LayerType::name, DataType> \
{ \
    using Type = armnn::name##Layer; \
    using Desc = descType; \
    using QueueDesc = armnn::name##QueueDescriptor; \
    constexpr static const char* NameStr = #name; \
    \
    static std::unique_ptr<armnn::IWorkload> MakeDummyWorkload(armnn::IWorkloadFactory *factory, \
                                                               unsigned int nIn, unsigned int nOut) \
    { \
        QueueDesc desc; \
        armnn::WorkloadInfo info = MakeDummyWorkloadInfo<DataType>(nIn, nOut); \
        return factory->Create##name(desc, info); \
    } \
};
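
// As an illustration, DECLARE_LAYER_POLICY_CUSTOM_PARAM(Activation, armnn::ActivationDescriptor)
// expands to a LayerTypePolicy specialization with Type = armnn::ActivationLayer,
// QueueDesc = armnn::ActivationQueueDescriptor, NameStr = "Activation", and a MakeDummyWorkload()
// that forwards to factory->CreateActivation(desc, info).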

// Define a layer policy specialization for use with the IsLayerSupported tests.
// Use this version for layers whose constructor takes 1 parameter (name).
#define DECLARE_LAYER_POLICY_1_PARAM(name) DECLARE_LAYER_POLICY_CUSTOM_PARAM(name, void)

// Define a layer policy specialization for use with the IsLayerSupported tests.
// Use this version for layers whose constructor takes 2 parameters (descriptor and name).
#define DECLARE_LAYER_POLICY_2_PARAM(name) DECLARE_LAYER_POLICY_CUSTOM_PARAM(name, armnn::name##Descriptor)

// Layer policy template.
template<armnn::LayerType Type, armnn::DataType DataType>
struct LayerTypePolicy;

// Every entry in the armnn::LayerType enum must be accounted for below.
DECLARE_LAYER_POLICY_2_PARAM(Activation)

DECLARE_LAYER_POLICY_1_PARAM(Addition)

DECLARE_LAYER_POLICY_2_PARAM(BatchNormalization)

DECLARE_LAYER_POLICY_1_PARAM(Constant)

DECLARE_LAYER_POLICY_1_PARAM(ConvertFp16ToFp32)

DECLARE_LAYER_POLICY_1_PARAM(ConvertFp32ToFp16)

DECLARE_LAYER_POLICY_2_PARAM(Convolution2d)

DECLARE_LAYER_POLICY_1_PARAM(MemCopy)

DECLARE_LAYER_POLICY_2_PARAM(DepthwiseConvolution2d)

DECLARE_LAYER_POLICY_2_PARAM(FakeQuantization)

DECLARE_LAYER_POLICY_1_PARAM(Floor)

DECLARE_LAYER_POLICY_2_PARAM(FullyConnected)

DECLARE_LAYER_POLICY_CUSTOM_PARAM(Input, armnn::LayerBindingId)

DECLARE_LAYER_POLICY_2_PARAM(L2Normalization)

DECLARE_LAYER_POLICY_2_PARAM(Lstm)

DECLARE_LAYER_POLICY_2_PARAM(Mean)

DECLARE_LAYER_POLICY_2_PARAM(Merger)

DECLARE_LAYER_POLICY_1_PARAM(Multiplication)

DECLARE_LAYER_POLICY_2_PARAM(Normalization)

DECLARE_LAYER_POLICY_CUSTOM_PARAM(Output, armnn::LayerBindingId)

DECLARE_LAYER_POLICY_2_PARAM(Pad)

DECLARE_LAYER_POLICY_2_PARAM(Permute)

DECLARE_LAYER_POLICY_2_PARAM(Pooling2d)

DECLARE_LAYER_POLICY_1_PARAM(Division)

DECLARE_LAYER_POLICY_2_PARAM(ResizeBilinear)

DECLARE_LAYER_POLICY_2_PARAM(Reshape)

DECLARE_LAYER_POLICY_2_PARAM(Softmax)

DECLARE_LAYER_POLICY_2_PARAM(Splitter)

DECLARE_LAYER_POLICY_1_PARAM(Subtraction)

// Generic implementation to get the number of input slots for a given layer type.
template<armnn::LayerType Type>
unsigned int GetNumInputs(const armnn::Layer& layer)
{
    return layer.GetNumInputSlots();
}

// Generic implementation to get the number of output slots for a given layer type.
template<armnn::LayerType Type>
unsigned int GetNumOutputs(const armnn::Layer& layer)
{
    return layer.GetNumOutputSlots();
}

template<>
unsigned int GetNumInputs<armnn::LayerType::Merger>(const armnn::Layer& layer)
{
    boost::ignore_unused(layer);
    return 2;
}
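
// Note: the dummy MergerLayer above is created with OriginsDescriptor(2), so this specialization
// pins the input count at two instead of querying the layer instance.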

// Tests that the IsLayerSupported() function returns the correct value.
// The expected value is determined by *trying* to create the relevant workload and checking
// whether the outcome matches IsLayerSupported()'s verdict.
// Returns true if expectations are met, otherwise returns false.
template<typename FactoryType, armnn::DataType DataType, armnn::LayerType Type>
bool IsLayerSupportedTest(FactoryType *factory, Tag<Type>)
{
    using LayerPolicy = LayerTypePolicy<Type, DataType>;
    using LayerType = typename LayerPolicy::Type;
    using LayerDesc = typename LayerPolicy::Desc;
    DummyLayer<LayerType, LayerDesc> layer;

    unsigned int numIn = GetNumInputs<Type>(*layer.m_Layer);
    unsigned int numOut = GetNumOutputs<Type>(*layer.m_Layer);

    // Make another dummy layer just to make IsLayerSupported have valid inputs.
    DummyLayer<armnn::ConstantLayer, void> previousLayer;
    // Set the output of the previous layer to a dummy tensor.
    armnn::TensorInfo output = MakeDummyTensorInfo<DataType>();
    previousLayer.m_Layer->GetOutputSlot(0).SetTensorInfo(output);
    // Connect the output of the previous layer to each input of the tested layer.
    for (unsigned int i = 0; i < numIn; i++)
    {
        armnn::IOutputSlot& previousLayerOutputSlot = previousLayer.m_Layer->GetOutputSlot(0);
        armnn::IInputSlot& layerInputSlot = layer.m_Layer->GetInputSlot(i);
        previousLayerOutputSlot.Connect(layerInputSlot);
    }
    // Set each output of the tested layer to a dummy tensor.
    for (unsigned int i = 0; i < numOut; i++)
    {
        layer.m_Layer->GetOutputSlot(i).SetTensorInfo(output);
    }

    std::string layerName = LayerPolicy::NameStr;
    std::string reasonIfUnsupported;
    if (FactoryType::IsLayerSupported(*layer.m_Layer, DataType, reasonIfUnsupported))
    {
        std::string errorMsg = " layer expected support but found none.";
        try
        {
            bool retVal = LayerPolicy::MakeDummyWorkload(factory, numIn, numOut).get() != nullptr;
            BOOST_CHECK_MESSAGE(retVal, layerName << errorMsg);
            return retVal;
        }
        catch(const armnn::InvalidArgumentException& e)
        {
            boost::ignore_unused(e);
            // This is ok since we throw InvalidArgumentException when creating the dummy workload.
            return true;
        }
        catch(const std::exception& e)
        {
            errorMsg = e.what();
            BOOST_TEST_ERROR(layerName << ": " << errorMsg);
            return false;
        }
        catch(...)
        {
            errorMsg = "Unexpected error while testing support for ";
            BOOST_TEST_ERROR(errorMsg << layerName);
            return false;
        }
    }
    else
    {
        std::string errorMsg = " layer expected no support (giving reason: " + reasonIfUnsupported +
            ") but found some.";
        try
        {
            bool retVal = LayerPolicy::MakeDummyWorkload(factory, numIn, numOut).get() == nullptr;
            BOOST_CHECK_MESSAGE(retVal, layerName << errorMsg);
            return retVal;
        }
        // These two exceptions are ok: For workloads that are partially supported, attempting to instantiate them
        // using parameters that make IsLayerSupported() return false should throw an
        // InvalidArgumentException or UnimplementedException.
        catch(const armnn::InvalidArgumentException& e)
        {
            boost::ignore_unused(e);
            return true;
        }
        catch(const armnn::UnimplementedException& e)
        {
            boost::ignore_unused(e);
            return true;
        }
        catch(const std::exception& e)
        {
            errorMsg = e.what();
            BOOST_TEST_ERROR(layerName << ": " << errorMsg);
            return false;
        }
        catch(...)
        {
            errorMsg = "Unexpected error while testing support for ";
            BOOST_TEST_ERROR(errorMsg << layerName);
            return false;
        }
    }
}

// Helper function to compute the next type in the LayerType enum.
constexpr armnn::LayerType NextType(armnn::LayerType type)
{
    return static_cast<armnn::LayerType>(static_cast<int>(type)+1);
}
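
// A minimal compile-time sketch of the assumption NextType() and the recursion below rely on:
// the LayerType entries form a contiguous range from FirstLayer to LastLayer.
static_assert(static_cast<int>(armnn::LayerType::FirstLayer) <= static_cast<int>(armnn::LayerType::LastLayer),
              "LayerType is expected to be a contiguous enum range");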

// Termination function for the recursion over the LayerType enumeration.
template<typename FactoryType, armnn::DataType DataType, armnn::LayerType Type>
bool IsLayerSupportedTestsImpl(FactoryType *factory, Tag<armnn::LayerType::LastLayer>)
{
    return IsLayerSupportedTest<FactoryType, DataType, Type>(factory, Tag<Type>());
}

// Recursive function to test an entry in the LayerType enum and then move on to the next entry.
template<typename FactoryType, armnn::DataType DataType, armnn::LayerType Type>
bool IsLayerSupportedTestsImpl(FactoryType *factory, Tag<Type>)
{
    bool v = IsLayerSupportedTest<FactoryType, DataType, Type>(factory, Tag<Type>());

    return v &&
        IsLayerSupportedTestsImpl<FactoryType, DataType, NextType(Type)>
            (factory, Tag<NextType(Type)>());
}

// Helper function to pass through to the test framework.
template<typename FactoryType, armnn::DataType DataType>
bool IsLayerSupportedTests(FactoryType *factory)
{
    return IsLayerSupportedTestsImpl<FactoryType, DataType>(factory, Tag<armnn::LayerType::FirstLayer>());
}
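
// Typical usage from a backend test, sketched under the assumption that the chosen workload
// factory (e.g. armnn::RefWorkloadFactory) exposes the static IsLayerSupported() used above:
//     armnn::RefWorkloadFactory factory;
//     BOOST_CHECK(IsLayerSupportedTests<armnn::RefWorkloadFactory, armnn::DataType::Float32>(&factory));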

template<armnn::LayerType Type>
bool TestLayerTypeMatches()
{
    using LayerPolicy = LayerTypePolicy<Type, armnn::DataType::Float32>;
    using LayerType = typename LayerPolicy::Type;
    using LayerDesc = typename LayerPolicy::Desc;
    DummyLayer<LayerType, LayerDesc> layer;

    std::stringstream ss;
    ss << LayerPolicy::NameStr << " layer type mismatches expected layer type value.";
    bool v = Type == layer.m_Layer->GetType();
    BOOST_CHECK_MESSAGE(v, ss.str());
    return v;
}

template<armnn::LayerType Type>
bool LayerTypeMatchesTestImpl(Tag<armnn::LayerType::LastLayer>)
{
    return TestLayerTypeMatches<Type>();
}

template<armnn::LayerType Type>
bool LayerTypeMatchesTestImpl(Tag<Type>)
{
    return TestLayerTypeMatches<Type>() &&
        LayerTypeMatchesTestImpl<NextType(Type)>(Tag<NextType(Type)>());
}
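
// Illustrative entry point for the whole LayerTypeMatches traversal:
//     BOOST_CHECK(LayerTypeMatchesTestImpl<armnn::LayerType::FirstLayer>(
//         Tag<armnn::LayerType::FirstLayer>()));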

template<typename FactoryType, typename LayerType, armnn::DataType InputDataType, armnn::DataType OutputDataType>
bool IsConvertLayerSupportedTests(std::string& reasonIfUnsupported)
{
    armnn::Graph graph;
    LayerType* const layer = graph.AddLayer<LayerType>("LayerName");

    armnn::Layer* const input = graph.AddLayer<armnn::InputLayer>(0, "input");
    armnn::Layer* const output = graph.AddLayer<armnn::OutputLayer>(0, "output");

    armnn::TensorInfo inputTensorInfo({1, 3, 2, 3}, InputDataType);
    armnn::TensorInfo outputTensorInfo({1, 3, 2, 3}, OutputDataType);

    input->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
    input->GetOutputHandler(0).SetTensorInfo(inputTensorInfo);
    layer->GetOutputSlot(0).Connect(output->GetInputSlot(0));
    layer->GetOutputHandler(0).SetTensorInfo(outputTensorInfo);

    bool result = FactoryType::IsLayerSupported(*layer, InputDataType, reasonIfUnsupported);

    return result;
}
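
// An illustrative call for one of the conversion layers declared above (the factory type is an
// assumption, as in the sketch after IsLayerSupportedTests()):
//     std::string reason;
//     bool supported = IsConvertLayerSupportedTests<armnn::RefWorkloadFactory,
//         armnn::ConvertFp16ToFp32Layer, armnn::DataType::Float16, armnn::DataType::Float32>(reason);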

} // anonymous namespace