//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//
#pragma once

#include "Graph.hpp"

#include <boost/core/ignore_unused.hpp>

namespace
{
armnn::Graph dummyGraph;

// Make a dummy TensorInfo object.
template<armnn::DataType DataType>
armnn::TensorInfo MakeDummyTensorInfo()
{
    return armnn::TensorInfo({2,2,2,2}, DataType);
}

// Make a dummy WorkloadInfo using a dummy TensorInfo.
template<armnn::DataType DataType>
armnn::WorkloadInfo MakeDummyWorkloadInfo(unsigned int numInputs, unsigned int numOutputs)
{
    armnn::WorkloadInfo info;
    for (unsigned int i = 0; i < numInputs; i++)
    {
        info.m_InputTensorInfos.push_back(MakeDummyTensorInfo<DataType>());
    }
    for (unsigned int o = 0; o < numOutputs; o++)
    {
        info.m_OutputTensorInfos.push_back(MakeDummyTensorInfo<DataType>());
    }
    return info;
}

// Template class to create a dummy layer (2 parameters).
template<typename LayerType, typename DescType = typename LayerType::DescriptorType>
struct DummyLayer
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<LayerType>(DescType(), "");
    }
    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }
    LayerType* m_Layer;
};

// Template class to create a dummy layer (1 parameter).
template<typename LayerType>
struct DummyLayer<LayerType, void>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<LayerType>("");
    }
    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }
    LayerType* m_Layer;
};

template<>
struct DummyLayer<armnn::BatchNormalizationLayer>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::BatchNormalizationLayer>(armnn::BatchNormalizationDescriptor(), "");
        m_Layer->m_Mean = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_Variance = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_Beta = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_Gamma = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
    }
    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }
    armnn::BatchNormalizationLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::ConstantLayer, void>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::ConstantLayer>("");
    }
    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }
    armnn::ConstantLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::InputLayer, armnn::LayerBindingId>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::InputLayer>(armnn::LayerBindingId(), "");
    }
    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }
    armnn::InputLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::MergerLayer>
{
    DummyLayer()
    {
        armnn::OriginsDescriptor desc(2);
        m_Layer = dummyGraph.AddLayer<armnn::MergerLayer>(desc, "");
    }
    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }
    armnn::MergerLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::OutputLayer, armnn::LayerBindingId>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::OutputLayer>(armnn::LayerBindingId(), "");
    }
    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }
    armnn::OutputLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::SplitterLayer>
{
    DummyLayer()
    {
        armnn::ViewsDescriptor desc(1);
        m_Layer = dummyGraph.AddLayer<armnn::SplitterLayer>(desc, "");
    }
    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }
    armnn::SplitterLayer* m_Layer;
};

template <typename ConvolutionLayerType>
struct DummyConvolutionLayer
{
    DummyConvolutionLayer()
    {
        typename ConvolutionLayerType::DescriptorType desc;
        m_Layer = dummyGraph.AddLayer<ConvolutionLayerType>(desc, "");
        m_Layer->m_Weight = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_Bias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
    }
    ~DummyConvolutionLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }
    ConvolutionLayerType* m_Layer;
};

template<>
struct DummyLayer<armnn::Convolution2dLayer>
    : public DummyConvolutionLayer<armnn::Convolution2dLayer>
{
};

template<>
struct DummyLayer<armnn::DepthwiseConvolution2dLayer>
    : public DummyConvolutionLayer<armnn::DepthwiseConvolution2dLayer>
{
};

// Template class to create a dummy LSTM layer. CIFG is disabled in the descriptor so
// that the optional CIFG parameters can be populated as well.
template <typename LstmLayerType>
struct DummyLstmLayer
{
    DummyLstmLayer()
    {
        typename LstmLayerType::DescriptorType desc;
        desc.m_CifgEnabled = false;

        m_Layer = dummyGraph.AddLayer<LstmLayerType>(desc, "");
        m_Layer->m_BasicParameters.m_InputToForgetWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_InputToCellWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_InputToOutputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_RecurrentToForgetWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_RecurrentToCellWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_RecurrentToOutputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_ForgetGateBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_CellBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_OutputGateBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));

        m_Layer->m_CifgParameters.m_InputToInputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_CifgParameters.m_RecurrentToInputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_CifgParameters.m_CellToInputWeights = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_CifgParameters.m_InputGateBias = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
    }
    ~DummyLstmLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }
    LstmLayerType* m_Layer;
};

template<>
struct DummyLayer<armnn::LstmLayer>
    : public DummyLstmLayer<armnn::LstmLayer>
{
};

template<>
struct DummyLayer<armnn::FullyConnectedLayer>
{
    DummyLayer()
    {
        armnn::FullyConnectedLayer::DescriptorType desc;
        m_Layer = dummyGraph.AddLayer<armnn::FullyConnectedLayer>(desc, "");
        m_Layer->m_Weight = std::make_unique<armnn::ScopedCpuTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
    }
    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }
    armnn::FullyConnectedLayer* m_Layer;
};

// Tag for giving LayerType entries a unique strong type each.
template<armnn::LayerType>
struct Tag{};

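// For example, Tag<armnn::LayerType::Activation> and Tag<armnn::LayerType::Addition>
// are two distinct, unrelated types, which lets the recursive test functions further
// down select a different overload for each enum entry at compile time.
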
#define DECLARE_LAYER_POLICY_CUSTOM_PARAM(name, descType) \
template<armnn::DataType DataType> \
struct LayerTypePolicy<armnn::LayerType::name, DataType> \
{ \
    using Type = armnn::name##Layer; \
    using Desc = descType; \
    using QueueDesc = armnn::name##QueueDescriptor; \
    constexpr static const char* NameStr = #name; \
    \
    static std::unique_ptr<armnn::IWorkload> MakeDummyWorkload(armnn::IWorkloadFactory *factory, \
        unsigned int nIn, unsigned int nOut) \
    { \
        QueueDesc desc; \
        armnn::WorkloadInfo info = MakeDummyWorkloadInfo<DataType>(nIn, nOut); \
        return factory->Create##name(desc, info); \
    } \
};

// Define a layer policy specialization for use with the IsLayerSupported tests.
// Use this version for layers whose constructor takes 1 parameter (name).
#define DECLARE_LAYER_POLICY_1_PARAM(name) DECLARE_LAYER_POLICY_CUSTOM_PARAM(name, void)

// Define a layer policy specialization for use with the IsLayerSupported tests.
// Use this version for layers whose constructor takes 2 parameters (descriptor and name).
#define DECLARE_LAYER_POLICY_2_PARAM(name) DECLARE_LAYER_POLICY_CUSTOM_PARAM(name, armnn::name##Descriptor)

// Layer policy template.
template<armnn::LayerType Type, armnn::DataType DataType>
struct LayerTypePolicy;

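// For reference, a sketch of what DECLARE_LAYER_POLICY_2_PARAM(Activation) expands to,
// derived mechanically from the macro above:
//
//     template<armnn::DataType DataType>
//     struct LayerTypePolicy<armnn::LayerType::Activation, DataType>
//     {
//         using Type = armnn::ActivationLayer;
//         using Desc = armnn::ActivationDescriptor;
//         using QueueDesc = armnn::ActivationQueueDescriptor;
//         constexpr static const char* NameStr = "Activation";
//
//         static std::unique_ptr<armnn::IWorkload> MakeDummyWorkload(armnn::IWorkloadFactory *factory,
//             unsigned int nIn, unsigned int nOut)
//         {
//             QueueDesc desc;
//             armnn::WorkloadInfo info = MakeDummyWorkloadInfo<DataType>(nIn, nOut);
//             return factory->CreateActivation(desc, info);
//         }
//     };
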
// Every entry in the armnn::LayerType enum must be accounted for below.
DECLARE_LAYER_POLICY_2_PARAM(Activation)

DECLARE_LAYER_POLICY_1_PARAM(Addition)

DECLARE_LAYER_POLICY_2_PARAM(BatchNormalization)

DECLARE_LAYER_POLICY_1_PARAM(Constant)

DECLARE_LAYER_POLICY_1_PARAM(ConvertFp16ToFp32)

DECLARE_LAYER_POLICY_1_PARAM(ConvertFp32ToFp16)

DECLARE_LAYER_POLICY_2_PARAM(Convolution2d)

DECLARE_LAYER_POLICY_1_PARAM(MemCopy)

DECLARE_LAYER_POLICY_2_PARAM(DepthwiseConvolution2d)

DECLARE_LAYER_POLICY_2_PARAM(FakeQuantization)

DECLARE_LAYER_POLICY_1_PARAM(Floor)

DECLARE_LAYER_POLICY_2_PARAM(FullyConnected)

DECLARE_LAYER_POLICY_CUSTOM_PARAM(Input, armnn::LayerBindingId)

DECLARE_LAYER_POLICY_1_PARAM(L2Normalization)

DECLARE_LAYER_POLICY_2_PARAM(Lstm)

DECLARE_LAYER_POLICY_2_PARAM(Mean)

DECLARE_LAYER_POLICY_2_PARAM(Merger)

DECLARE_LAYER_POLICY_1_PARAM(Multiplication)

DECLARE_LAYER_POLICY_2_PARAM(Normalization)

DECLARE_LAYER_POLICY_CUSTOM_PARAM(Output, armnn::LayerBindingId)

DECLARE_LAYER_POLICY_2_PARAM(Pad)

DECLARE_LAYER_POLICY_2_PARAM(Permute)

DECLARE_LAYER_POLICY_2_PARAM(Pooling2d)

DECLARE_LAYER_POLICY_1_PARAM(Division)

DECLARE_LAYER_POLICY_2_PARAM(ResizeBilinear)

DECLARE_LAYER_POLICY_2_PARAM(Reshape)

DECLARE_LAYER_POLICY_2_PARAM(Softmax)

DECLARE_LAYER_POLICY_2_PARAM(Splitter)

DECLARE_LAYER_POLICY_1_PARAM(Subtraction)

// Generic implementation to get the number of input slots for a given layer type.
template<armnn::LayerType Type>
unsigned int GetNumInputs(const armnn::Layer& layer)
{
    return layer.GetNumInputSlots();
}

// Generic implementation to get the number of output slots for a given layer type.
template<armnn::LayerType Type>
unsigned int GetNumOutputs(const armnn::Layer& layer)
{
    return layer.GetNumOutputSlots();
}

// Merger is tested with two inputs, regardless of what the dummy layer reports.
template<>
unsigned int GetNumInputs<armnn::LayerType::Merger>(const armnn::Layer& layer)
{
    boost::ignore_unused(layer);
    return 2;
}

// Tests that the IsLayerSupported() function returns the correct value.
// We determine the correct value by *trying* to create the relevant workload and checking whether the result
// matches what we expect.
// Returns true if expectations are met, otherwise returns false.
template<typename FactoryType, armnn::DataType DataType, armnn::LayerType Type>
bool IsLayerSupportedTest(FactoryType *factory, Tag<Type>)
{
    using LayerPolicy = LayerTypePolicy<Type, DataType>;
    using LayerType = typename LayerPolicy::Type;
    using LayerDesc = typename LayerPolicy::Desc;
    DummyLayer<LayerType, LayerDesc> layer;

    unsigned int numIn = GetNumInputs<Type>(*layer.m_Layer);
    unsigned int numOut = GetNumOutputs<Type>(*layer.m_Layer);

    // Make another dummy layer just to make IsLayerSupported have valid inputs.
    DummyLayer<armnn::ConstantLayer, void> previousLayer;
    // Set the output of the previous layer to a dummy tensor.
    armnn::TensorInfo output = MakeDummyTensorInfo<DataType>();
    previousLayer.m_Layer->GetOutputSlot(0).SetTensorInfo(output);
    // Connect the output of the previous layer to each input of the tested layer.
    for (unsigned int i = 0; i < numIn; i++)
    {
        armnn::IOutputSlot& previousLayerOutputSlot = previousLayer.m_Layer->GetOutputSlot(0);
        armnn::IInputSlot& layerInputSlot = layer.m_Layer->GetInputSlot(i);
        previousLayerOutputSlot.Connect(layerInputSlot);
    }
    // Set each output of the tested layer to a dummy tensor.
    for (unsigned int i = 0; i < numOut; i++)
    {
        layer.m_Layer->GetOutputSlot(i).SetTensorInfo(output);
    }

    std::string layerName = LayerPolicy::NameStr;
    std::string reasonIfUnsupported;
    if (FactoryType::IsLayerSupported(*layer.m_Layer, DataType, reasonIfUnsupported))
    {
        std::string errorMsg = " layer expected support but found none.";
        try
        {
            bool retVal = LayerPolicy::MakeDummyWorkload(factory, numIn, numOut).get() != nullptr;
            // Hacky workaround, to be replaced: the check below is disabled because
            // Lstm currently only supports Float32.
            // BOOST_CHECK_MESSAGE(retVal, layerName << errorMsg);
            return retVal;
        }
        catch(const armnn::InvalidArgumentException& e)
        {
            boost::ignore_unused(e);
            // This is ok since we throw InvalidArgumentException when creating the dummy workload.
            return true;
        }
        catch(const std::exception& e)
        {
            errorMsg = e.what();
            BOOST_TEST_ERROR(layerName << ": " << errorMsg);
            return false;
        }
        catch(...)
        {
            errorMsg = "Unexpected error while testing support for ";
            BOOST_TEST_ERROR(errorMsg << layerName);
            return false;
        }
    }
    else
    {
        std::string errorMsg = " layer expected no support (giving reason: " + reasonIfUnsupported + ") but found some.";
        try
        {
            bool retVal = LayerPolicy::MakeDummyWorkload(factory, numIn, numOut).get() == nullptr;
            BOOST_CHECK_MESSAGE(retVal, layerName << errorMsg);
            return retVal;
        }
        // These two exceptions are ok: for workloads that are partially supported, attempting to instantiate them
        // using parameters that make IsLayerSupported() return false should throw an
        // InvalidArgumentException or UnimplementedException.
        catch(const armnn::InvalidArgumentException& e)
        {
            boost::ignore_unused(e);
            return true;
        }
        catch(const armnn::UnimplementedException& e)
        {
            boost::ignore_unused(e);
            return true;
        }
        catch(const std::exception& e)
        {
            errorMsg = e.what();
            BOOST_TEST_ERROR(layerName << ": " << errorMsg);
            return false;
        }
        catch(...)
        {
            errorMsg = "Unexpected error while testing support for ";
            BOOST_TEST_ERROR(errorMsg << layerName);
            return false;
        }
    }
}

// Helper function to compute the next type in the LayerType enum.
constexpr armnn::LayerType NextType(armnn::LayerType type)
{
    return static_cast<armnn::LayerType>(static_cast<int>(type)+1);
}

// Termination overload for the recursion: tests the last entry in the LayerType enum.
template<typename FactoryType, armnn::DataType DataType, armnn::LayerType Type>
bool IsLayerSupportedTestsImpl(FactoryType *factory, Tag<armnn::LayerType::LastLayer>)
{
    return IsLayerSupportedTest<FactoryType, DataType, Type>(factory, Tag<Type>());
}

// Recursive function that tests the given entry in the LayerType enum and then recurses on the next entry.
template<typename FactoryType, armnn::DataType DataType, armnn::LayerType Type>
bool IsLayerSupportedTestsImpl(FactoryType *factory, Tag<Type>)
{
    bool v = IsLayerSupportedTest<FactoryType, DataType, Type>(factory, Tag<Type>());

    return v &&
           IsLayerSupportedTestsImpl<FactoryType, DataType, NextType(Type)>
               (factory, Tag<NextType(Type)>());
}

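// For example, starting the recursion at FirstLayer effectively expands to:
//
//     IsLayerSupportedTest<F, D, FirstLayer>(factory, Tag<FirstLayer>())
//         && IsLayerSupportedTest<F, D, NextType(FirstLayer)>(...)
//         && ... // one instantiation per LayerType entry
//         && IsLayerSupportedTest<F, D, LastLayer>(factory, Tag<LastLayer>());
//
// The Tag<armnn::LayerType::LastLayer> overload above terminates the recursion.
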
// Helper function to pass through to the test framework.
template<typename FactoryType, armnn::DataType DataType>
bool IsLayerSupportedTests(FactoryType *factory)
{
    return IsLayerSupportedTestsImpl<FactoryType, DataType>(factory, Tag<armnn::LayerType::FirstLayer>());
}

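// A sketch of typical use from a backend's test suite (assumes the including test file
// has access to a concrete factory such as armnn::RefWorkloadFactory):
//
//     BOOST_AUTO_TEST_CASE(IsLayerSupportedFloat32Reference)
//     {
//         armnn::RefWorkloadFactory factory;
//         IsLayerSupportedTests<armnn::RefWorkloadFactory, armnn::DataType::Float32>(&factory);
//     }
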
template<armnn::LayerType Type>
bool TestLayerTypeMatches()
{
    using LayerPolicy = LayerTypePolicy<Type, armnn::DataType::Float32>;
    using LayerType = typename LayerPolicy::Type;
    using LayerDesc = typename LayerPolicy::Desc;
    DummyLayer<LayerType, LayerDesc> layer;

    std::stringstream ss;
    ss << LayerPolicy::NameStr << " layer type does not match the expected layer type value.";
    bool v = Type == layer.m_Layer->GetType();
    BOOST_CHECK_MESSAGE(v, ss.str());
    return v;
}

template<armnn::LayerType Type>
bool LayerTypeMatchesTestImpl(Tag<armnn::LayerType::LastLayer>)
{
    return TestLayerTypeMatches<Type>();
}

template<armnn::LayerType Type>
bool LayerTypeMatchesTestImpl(Tag<Type>)
{
    return TestLayerTypeMatches<Type>() &&
           LayerTypeMatchesTestImpl<NextType(Type)>(Tag<NextType(Type)>());
}

bool LayerTypeMatchesTest()
{
    return LayerTypeMatchesTestImpl<armnn::LayerType::FirstLayer>(Tag<armnn::LayerType::FirstLayer>());
}

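// LayerTypeMatchesTest needs no factory, so a test case can call it directly, e.g.:
//
//     BOOST_AUTO_TEST_CASE(LayerTypeMatches)
//     {
//         BOOST_TEST(LayerTypeMatchesTest());
//     }
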
template<typename FactoryType, typename LayerType, armnn::DataType InputDataType, armnn::DataType OutputDataType>
bool IsConvertLayerSupportedTests(std::string& reasonIfUnsupported)
{
    armnn::Graph graph;
    LayerType* const layer = graph.AddLayer<LayerType>("LayerName");

    armnn::Layer* const input = graph.AddLayer<armnn::InputLayer>(0, "input");
    armnn::Layer* const output = graph.AddLayer<armnn::OutputLayer>(0, "output");

    armnn::TensorInfo inputTensorInfo({1, 3, 2, 3}, InputDataType);
    armnn::TensorInfo outputTensorInfo({1, 3, 2, 3}, OutputDataType);

    input->GetOutputSlot(0).Connect(layer->GetInputSlot(0));
    input->GetOutputHandler(0).SetTensorInfo(inputTensorInfo);
    layer->GetOutputSlot(0).Connect(output->GetInputSlot(0));
    layer->GetOutputHandler(0).SetTensorInfo(outputTensorInfo);

    bool result = FactoryType::IsLayerSupported(*layer, InputDataType, reasonIfUnsupported);

    return result;
}

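// Example invocation for a conversion layer (a sketch; the factory shown is only one
// possible choice of backend under test):
//
//     std::string reasonIfUnsupported;
//     bool supported = IsConvertLayerSupportedTests<armnn::RefWorkloadFactory,
//                                                   armnn::ConvertFp16ToFp32Layer,
//                                                   armnn::DataType::Float16,
//                                                   armnn::DataType::Float32>(reasonIfUnsupported);
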
} // anonymous namespace