blob: 2e1ce0a674fb5ceea0cb2443408129744824f9bf [file] [log] [blame]
//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +00005
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +00006#include <Layer.hpp>
7#include <LayersFwd.hpp>
David Beckdcb751f2018-10-03 11:42:42 +01008
David Beckb4540be2018-09-24 13:18:27 +01009#include <armnn/Types.hpp>
10#include <armnn/LayerSupport.hpp>
David Beck111b5d92018-11-12 14:59:37 +000011#include <armnn/ILayerSupport.hpp>
Matteo Martincighc601aa62019-10-29 15:03:22 +000012#include <armnn/BackendRegistry.hpp>
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +000013
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +000014#include <backendsCommon/WorkloadFactory.hpp>
Matteo Martincighe5b8eb92019-11-28 15:45:42 +000015#include <armnn/backends/IBackendInternal.hpp>
16#include <backendsCommon/CpuTensorHandle.hpp>
17#include <backendsCommon/WorkloadFactory.hpp>
18
Francis Murtagh46c09d02019-05-28 08:15:28 +010019#include <backendsCommon/test/WorkloadTestUtils.hpp>
telsoa014fcda012018-03-09 14:13:49 +000020
21#include <boost/cast.hpp>
telsoa014fcda012018-03-09 14:13:49 +000022#include <boost/iterator/transform_iterator.hpp>
23
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +000024#include <cstring>
David Beck111b5d92018-11-12 14:59:37 +000025#include <sstream>
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +000026
telsoa014fcda012018-03-09 14:13:49 +000027namespace armnn
28{
29
telsoa01c577f2c2018-08-31 09:22:23 +010030namespace
31{
telsoa01c577f2c2018-08-31 09:22:23 +010032
David Beck29c75de2018-10-23 13:35:58 +010033const TensorInfo OverrideDataType(const TensorInfo& info, Optional<DataType> type)
34{
35 if (!type)
36 {
37 return info;
telsoa01c577f2c2018-08-31 09:22:23 +010038 }
39
David Beck29c75de2018-10-23 13:35:58 +010040 return TensorInfo(info.GetShape(), type.value(), info.GetQuantizationScale(), info.GetQuantizationOffset());
telsoa01c577f2c2018-08-31 09:22:23 +010041}
42
David Beck29c75de2018-10-23 13:35:58 +010043} // anonymous namespace
44
David Beck33f0ae02018-10-18 15:13:56 +010045bool IWorkloadFactory::IsLayerSupported(const BackendId& backendId,
David Beckdcb751f2018-10-03 11:42:42 +010046 const IConnectableLayer& connectableLayer,
David Beck29c75de2018-10-23 13:35:58 +010047 Optional<DataType> dataType,
David Beckdcb751f2018-10-03 11:42:42 +010048 std::string& outReasonIfUnsupported)
telsoa014fcda012018-03-09 14:13:49 +000049{
David Beck33f0ae02018-10-18 15:13:56 +010050 Optional<std::string&> reason = outReasonIfUnsupported;
telsoa014fcda012018-03-09 14:13:49 +000051 bool result;
David Beckdcb751f2018-10-03 11:42:42 +010052 const Layer& layer = *(boost::polymorphic_downcast<const Layer*>(&connectableLayer));
53
David Beck111b5d92018-11-12 14:59:37 +000054 auto const& backendRegistry = BackendRegistryInstance();
55 if (!backendRegistry.IsBackendRegistered(backendId))
56 {
57 std::stringstream ss;
58 ss << connectableLayer.GetName() << " is not supported on " << backendId
59 << " because this backend is not registered.";
60
61 outReasonIfUnsupported = ss.str();
62 return false;
63 }
64
65 auto backendFactory = backendRegistry.GetFactory(backendId);
66 auto backendObject = backendFactory();
67 auto layerSupportObject = backendObject->GetLayerSupport();
David Beck33f0ae02018-10-18 15:13:56 +010068
telsoa014fcda012018-03-09 14:13:49 +000069 switch(layer.GetType())
70 {
71 case LayerType::Activation:
72 {
73 auto cLayer = boost::polymorphic_downcast<const ActivationLayer*>(&layer);
74 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +010075 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +010076 result = layerSupportObject->IsActivationSupported(
telsoa01c577f2c2018-08-31 09:22:23 +010077 OverrideDataType(input, dataType),
78 OverrideDataType(output, dataType),
79 cLayer->GetParameters(),
David Beck33f0ae02018-10-18 15:13:56 +010080 reason);
telsoa014fcda012018-03-09 14:13:49 +000081 break;
82 }
83 case LayerType::Addition:
84 {
85 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
86 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
87 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +010088 result = layerSupportObject->IsAdditionSupported(
telsoa01c577f2c2018-08-31 09:22:23 +010089 OverrideDataType(input0, dataType),
90 OverrideDataType(input1, dataType),
91 OverrideDataType(output, dataType),
David Beck33f0ae02018-10-18 15:13:56 +010092 reason);
telsoa014fcda012018-03-09 14:13:49 +000093 break;
94 }
Nikhil Rajee391d52019-09-05 17:50:44 +010095 case LayerType::ArgMinMax:
96 {
97 auto cLayer = boost::polymorphic_downcast<const ArgMinMaxLayer*>(&layer);
98 const ArgMinMaxDescriptor& descriptor = cLayer->GetParameters();
99
100 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
101 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
102 result = layerSupportObject->IsArgMinMaxSupported(
103 OverrideDataType(input, dataType),
Narumol Prangnawaratd1f57732019-10-31 14:24:02 +0000104 OverrideDataType(output, DataType::Signed32),
Nikhil Rajee391d52019-09-05 17:50:44 +0100105 descriptor,
106 reason);
107 break;
108 }
telsoa014fcda012018-03-09 14:13:49 +0000109 case LayerType::BatchNormalization:
110 {
111 auto cLayer = boost::polymorphic_downcast<const BatchNormalizationLayer*>(&layer);
112 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100113 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
114 const TensorInfo& mean = cLayer->m_Mean->GetTensorInfo();
115 const TensorInfo& var = cLayer->m_Variance->GetTensorInfo();
116 const TensorInfo& beta = cLayer->m_Beta->GetTensorInfo();
117 const TensorInfo& gamma = cLayer->m_Gamma->GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100118 result = layerSupportObject->IsBatchNormalizationSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100119 OverrideDataType(input, dataType),
120 OverrideDataType(output, dataType),
121 OverrideDataType(mean, dataType),
122 OverrideDataType(var, dataType),
123 OverrideDataType(beta, dataType),
124 OverrideDataType(gamma, dataType),
125 cLayer->GetParameters(),
David Beck33f0ae02018-10-18 15:13:56 +0100126 reason);
telsoa014fcda012018-03-09 14:13:49 +0000127 break;
128 }
Éanna Ó Catháin4e1e1362018-11-12 11:36:34 +0000129 case LayerType::BatchToSpaceNd:
130 {
131 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
132 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
133 auto cLayer = boost::polymorphic_downcast<const BatchToSpaceNdLayer*>(&layer);
134
135 result = layerSupportObject->IsBatchToSpaceNdSupported(OverrideDataType(input, dataType),
136 OverrideDataType(output, dataType),
137 cLayer->GetParameters(),
138 reason);
139 break;
140 }
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +0100141 case LayerType::Comparison:
142 {
143 auto cLayer = boost::polymorphic_downcast<const ComparisonLayer*>(&layer);
144
145 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
146 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
147 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
148
149 result = layerSupportObject->IsComparisonSupported(OverrideDataType(input0, dataType),
150 OverrideDataType(input1, dataType),
151 OverrideDataType(output, DataType::Boolean),
152 cLayer->GetParameters(),
153 reason);
154 break;
155 }
telsoa014fcda012018-03-09 14:13:49 +0000156 case LayerType::Constant:
157 {
158 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100159 result = layerSupportObject->IsConstantSupported(OverrideDataType(output, dataType), reason);
telsoa01c577f2c2018-08-31 09:22:23 +0100160 break;
161 }
162 case LayerType::ConvertFp16ToFp32:
163 {
164 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
165 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100166 result = layerSupportObject->IsConvertFp16ToFp32Supported(input, output, reason);
telsoa01c577f2c2018-08-31 09:22:23 +0100167 break;
168 }
169 case LayerType::ConvertFp32ToFp16:
170 {
171 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
172 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100173 result = layerSupportObject->IsConvertFp32ToFp16Supported(input, output, reason);
telsoa014fcda012018-03-09 14:13:49 +0000174 break;
175 }
176 case LayerType::Convolution2d:
177 {
178 auto cLayer = boost::polymorphic_downcast<const Convolution2dLayer*>(&layer);
arovir01a6824102018-08-28 17:40:45 +0100179
180 const TensorInfo input = OverrideDataType(layer.GetInputSlot(0).GetConnection()->GetTensorInfo(),
181 dataType);
telsoa01c577f2c2018-08-31 09:22:23 +0100182 const TensorInfo output = OverrideDataType(layer.GetOutputSlot(0).GetTensorInfo(), dataType);
surmeh013537c2c2018-05-18 16:31:43 +0100183 BOOST_ASSERT(cLayer->m_Weight.get() != nullptr);
184
arovir01a6824102018-08-28 17:40:45 +0100185 const Convolution2dDescriptor& descriptor = cLayer->GetParameters();
surmeh013537c2c2018-05-18 16:31:43 +0100186
arovir01a6824102018-08-28 17:40:45 +0100187 // Construct optional biases object based on the value of m_BiasEnabled
David Beck5eec11d2018-10-04 15:43:17 +0100188 Optional<TensorInfo> biases;
surmeh013537c2c2018-05-18 16:31:43 +0100189 if (descriptor.m_BiasEnabled)
190 {
David Beck5eec11d2018-10-04 15:43:17 +0100191 biases =
192 OverrideDataType(cLayer->m_Bias->GetTensorInfo(), GetBiasTypeFromWeightsType(dataType));
surmeh013537c2c2018-05-18 16:31:43 +0100193 }
194
David Beck33f0ae02018-10-18 15:13:56 +0100195 result = layerSupportObject->IsConvolution2dSupported(
surmeh013537c2c2018-05-18 16:31:43 +0100196 input,
197 output,
198 descriptor,
telsoa01c577f2c2018-08-31 09:22:23 +0100199 OverrideDataType(cLayer->m_Weight->GetTensorInfo(), dataType),
arovir01a6824102018-08-28 17:40:45 +0100200 biases,
David Beck33f0ae02018-10-18 15:13:56 +0100201 reason);
telsoa014fcda012018-03-09 14:13:49 +0000202 break;
203 }
Nattapat Chaimanowonga9a1cf12018-12-03 16:06:49 +0000204 case LayerType::Debug:
205 {
206 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
207 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
208
209 result = layerSupportObject->IsDebugSupported(OverrideDataType(input, dataType),
210 OverrideDataType(output, dataType),
211 reason);
212 break;
213 }
Aron Virginas-Tardd6247f2019-09-19 14:31:17 +0100214 case LayerType::DepthToSpace:
215 {
216 auto cLayer = boost::polymorphic_downcast<const DepthToSpaceLayer*>(&layer);
217
218 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
219 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
220
221 result = layerSupportObject->IsDepthToSpaceSupported(OverrideDataType(input, dataType),
222 OverrideDataType(output, dataType),
223 cLayer->GetParameters(),
224 reason);
225 break;
226 }
telsoa014fcda012018-03-09 14:13:49 +0000227 case LayerType::DepthwiseConvolution2d:
228 {
229 auto cLayer = boost::polymorphic_downcast<const DepthwiseConvolution2dLayer*>(&layer);
telsoa01c577f2c2018-08-31 09:22:23 +0100230 const TensorInfo& input = OverrideDataType(layer.GetInputSlot(0).GetConnection()->GetTensorInfo(),
231 dataType);
232 const TensorInfo& output = OverrideDataType(layer.GetOutputSlot(0).GetTensorInfo(), dataType);
233 BOOST_ASSERT(cLayer->m_Weight.get() != nullptr);
234
telsoa01c577f2c2018-08-31 09:22:23 +0100235 const DepthwiseConvolution2dDescriptor& descriptor = cLayer->GetParameters();
arovir01a6824102018-08-28 17:40:45 +0100236
237 // Construct optional biases object based on the value of m_BiasEnabled
David Beck5eec11d2018-10-04 15:43:17 +0100238 Optional<TensorInfo> biases;
telsoa01c577f2c2018-08-31 09:22:23 +0100239 if (descriptor.m_BiasEnabled)
240 {
David Beck5eec11d2018-10-04 15:43:17 +0100241 biases =
242 OverrideDataType(cLayer->m_Bias->GetTensorInfo(), GetBiasTypeFromWeightsType(dataType));
telsoa01c577f2c2018-08-31 09:22:23 +0100243 }
telsoa01c577f2c2018-08-31 09:22:23 +0100244
David Beck33f0ae02018-10-18 15:13:56 +0100245 result = layerSupportObject->IsDepthwiseConvolutionSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100246 input,
247 output,
248 descriptor,
249 OverrideDataType(cLayer->m_Weight->GetTensorInfo(), dataType),
arovir01a6824102018-08-28 17:40:45 +0100250 biases,
David Beck33f0ae02018-10-18 15:13:56 +0100251 reason);
telsoa014fcda012018-03-09 14:13:49 +0000252 break;
253 }
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +0000254 case LayerType::Dequantize:
255 {
256 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
257 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
258
Aron Virginas-Tar87972be2019-11-13 15:16:28 +0000259 result = layerSupportObject->IsDequantizeSupported(input,
260 OverrideDataType(output, dataType),
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +0000261 reason);
262 break;
263 }
Narumol Prangnawarat94dd5d82019-01-23 18:06:26 +0000264 case LayerType::DetectionPostProcess:
265 {
Narumol Prangnawarat94dd5d82019-01-23 18:06:26 +0000266 auto cLayer = boost::polymorphic_downcast<const DetectionPostProcessLayer*>(&layer);
Derek Lamberti6a5e5e82019-12-05 14:41:20 +0000267 const TensorInfo& boxEncodings = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
268 const TensorInfo& scores = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
269 const TensorInfo& anchors = cLayer->m_Anchors->GetTensorInfo();
270
271 const TensorInfo& detectionBoxes = layer.GetOutputSlot(0).GetTensorInfo();
272 const TensorInfo& detectionClasses = layer.GetOutputSlot(1).GetTensorInfo();
273 const TensorInfo& detectionScores = layer.GetOutputSlot(2).GetTensorInfo();
274 const TensorInfo& numDetections = layer.GetOutputSlot(3).GetTensorInfo();
275
Narumol Prangnawarat94dd5d82019-01-23 18:06:26 +0000276 const DetectionPostProcessDescriptor& descriptor = cLayer->GetParameters();
Derek Lamberti6a5e5e82019-12-05 14:41:20 +0000277 result = layerSupportObject->IsDetectionPostProcessSupported(boxEncodings,
278 scores,
279 anchors,
280 detectionBoxes,
281 detectionClasses,
282 detectionScores,
283 numDetections,
Narumol Prangnawarat94dd5d82019-01-23 18:06:26 +0000284 descriptor,
285 reason);
286 break;
287 }
josh minor4a3c6102020-01-06 16:40:46 -0600288 case LayerType::ElementwiseUnary:
289 {
290 auto cLayer = boost::polymorphic_downcast<const ElementwiseUnaryLayer*>(&layer);
291
292 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
293 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
294
295 result = layerSupportObject->IsElementwiseUnarySupported(OverrideDataType(input, dataType),
296 OverrideDataType(output, dataType),
297 cLayer->GetParameters(),
298 reason);
299 break;
300 }
telsoa014fcda012018-03-09 14:13:49 +0000301 case LayerType::FakeQuantization:
302 {
303 auto cLayer = boost::polymorphic_downcast<const FakeQuantizationLayer*>(&layer);
304 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100305 result = layerSupportObject->IsFakeQuantizationSupported(OverrideDataType(input, dataType),
306 cLayer->GetParameters(),
307 reason);
telsoa014fcda012018-03-09 14:13:49 +0000308 break;
309 }
310 case LayerType::Floor:
311 {
312 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
313 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100314 result = layerSupportObject->IsFloorSupported(OverrideDataType(input, dataType),
315 OverrideDataType(output, dataType),
316 reason);
telsoa014fcda012018-03-09 14:13:49 +0000317 break;
318 }
319 case LayerType::FullyConnected:
320 {
321 auto cLayer = boost::polymorphic_downcast<const FullyConnectedLayer*>(&layer);
322 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100323 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
324 BOOST_ASSERT(cLayer->m_Weight.get() != nullptr);
325
326 TensorInfo biasInfo;
327 const TensorInfo * biasInfoPtr = nullptr;
Narumol Prangnawarat44179c32020-03-11 14:51:27 +0000328 static const TensorInfo dummyBFloat16Bias(TensorShape({1,1,1,1}), DataType::BFloat16);
telsoa01c577f2c2018-08-31 09:22:23 +0100329 static const TensorInfo dummyFloat16Bias(TensorShape({1,1,1,1}), DataType::Float16);
330 static const TensorInfo dummyFloat32Bias(TensorShape({1,1,1,1}), DataType::Float32);
331 static const TensorInfo dummyQA8Bias(TensorShape({1,1,1,1}), DataType::Signed32);
332
333 const FullyConnectedDescriptor& descriptor = cLayer->GetParameters();
334 if (descriptor.m_BiasEnabled)
335 {
336 BOOST_ASSERT(cLayer->m_Bias.get() != nullptr);
337 biasInfo = OverrideDataType(cLayer->m_Bias->GetTensorInfo(), GetBiasTypeFromWeightsType(dataType));
338 biasInfoPtr = &biasInfo;
339 }
340 else
341 {
342 // If biases are not enabled pass a dummy tensorinfo for the validation
343 switch(input.GetDataType())
344 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +0000345 case DataType::BFloat16:
346 {
347 biasInfoPtr = &dummyBFloat16Bias;
348 break;
349 }
telsoa01c577f2c2018-08-31 09:22:23 +0100350 case DataType::Float16:
351 {
352 biasInfoPtr = &dummyFloat16Bias;
353 break;
354 }
355 case DataType::Float32:
356 {
357 biasInfoPtr = &dummyFloat32Bias;
358 break;
359 }
Derek Lambertif90c56d2020-01-10 17:14:08 +0000360 case DataType::QAsymmU8:
Keith Davisa8565012020-02-14 12:22:40 +0000361 case DataType::QAsymmS8:
Keith Davis9d0ff742020-02-03 14:47:54 +0000362 case DataType::QSymmS8:
Derek Lambertif90c56d2020-01-10 17:14:08 +0000363 case DataType::QSymmS16:
telsoa01c577f2c2018-08-31 09:22:23 +0100364 {
365 biasInfoPtr = &dummyQA8Bias;
366 break;
367 }
368 default:
369 {
370 BOOST_ASSERT_MSG(false, "Unexpected bias type");
371 }
372 }
373 }
374
David Beck33f0ae02018-10-18 15:13:56 +0100375 result = layerSupportObject->IsFullyConnectedSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100376 OverrideDataType(input, dataType),
377 OverrideDataType(output, dataType),
378 OverrideDataType(cLayer->m_Weight->GetTensorInfo(), dataType),
379 *biasInfoPtr,
380 descriptor,
David Beck33f0ae02018-10-18 15:13:56 +0100381 reason);
telsoa014fcda012018-03-09 14:13:49 +0000382 break;
383 }
narpra01b89b05f2019-01-16 09:53:09 +0000384 case LayerType::Gather:
385 {
386 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
387 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
388 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
389 result = layerSupportObject->IsGatherSupported(OverrideDataType(input0, dataType),
Ellen Norris-Thompsone0dbedf2019-06-24 09:23:38 +0100390 input1,
narpra01b89b05f2019-01-16 09:53:09 +0000391 OverrideDataType(output, dataType),
392 reason);
393 break;
394 }
telsoa014fcda012018-03-09 14:13:49 +0000395 case LayerType::Input:
396 {
397 const TensorInfo& input = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100398 result = layerSupportObject->IsInputSupported(OverrideDataType(input, dataType), reason);
telsoa014fcda012018-03-09 14:13:49 +0000399 break;
400 }
Kevin Mayce5045a2019-10-02 14:07:47 +0100401 case LayerType::InstanceNormalization:
402 {
403 auto cLayer = boost::polymorphic_downcast<const InstanceNormalizationLayer*>(&layer);
404 const InstanceNormalizationDescriptor& descriptor = cLayer->GetParameters();
405
406 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
407 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
408
409 result = layerSupportObject->IsInstanceNormalizationSupported(
410 OverrideDataType(input, dataType),
411 OverrideDataType(output, dataType),
412 descriptor,
413 reason);
414 break;
415 }
telsoa014fcda012018-03-09 14:13:49 +0000416 case LayerType::L2Normalization:
417 {
Matteo Martincighbcd3c852018-09-28 14:14:12 +0100418 auto cLayer = boost::polymorphic_downcast<const L2NormalizationLayer*>(&layer);
419 const L2NormalizationDescriptor& descriptor = cLayer->GetParameters();
420
telsoa014fcda012018-03-09 14:13:49 +0000421 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100422 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Matteo Martincighbcd3c852018-09-28 14:14:12 +0100423
David Beck33f0ae02018-10-18 15:13:56 +0100424 result = layerSupportObject->IsL2NormalizationSupported(
Matteo Martincighbcd3c852018-09-28 14:14:12 +0100425 OverrideDataType(input, dataType),
426 OverrideDataType(output, dataType),
427 descriptor,
David Beck33f0ae02018-10-18 15:13:56 +0100428 reason);
telsoa01c577f2c2018-08-31 09:22:23 +0100429 break;
430 }
Aron Virginas-Tarf982dea2019-10-11 14:07:53 +0100431 case LayerType::LogSoftmax:
432 {
433 auto cLayer = boost::polymorphic_downcast<const LogSoftmaxLayer*>(&layer);
434
435 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
436 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
437
438 result = layerSupportObject->IsLogSoftmaxSupported(OverrideDataType(input, dataType),
439 OverrideDataType(output, dataType),
440 cLayer->GetParameters(),
441 reason);
442 break;
443 }
telsoa01c577f2c2018-08-31 09:22:23 +0100444 case LayerType::Lstm:
445 {
446 auto cLayer = boost::polymorphic_downcast<const LstmLayer*>(&layer);
447 const LstmDescriptor& descriptor = cLayer->GetParameters();
448
449 // All inputs.
450 const TensorInfo& input = OverrideDataType(layer.GetInputSlot(0).GetConnection()->GetTensorInfo(),
451 dataType);
452 const TensorInfo& outputStateIn = OverrideDataType(layer.GetInputSlot(1).GetConnection()->GetTensorInfo(),
453 dataType);
454 const TensorInfo& cellStateIn = OverrideDataType(layer.GetInputSlot(2).GetConnection()->GetTensorInfo(),
455 dataType);
456 // All outputs
457 const TensorInfo& scratchBuffer = OverrideDataType(layer.GetOutputSlot(0).GetTensorInfo(), dataType);
458 const TensorInfo& outputStateOut = OverrideDataType(layer.GetOutputSlot(1).GetTensorInfo(), dataType);
459 const TensorInfo& cellStateOut = OverrideDataType(layer.GetOutputSlot(2).GetTensorInfo(), dataType);
460 const TensorInfo& output = OverrideDataType(layer.GetOutputSlot(3).GetTensorInfo(), dataType);
461
462 // Basic parameters
463 const TensorInfo& inputToForgetWeights
464 = OverrideDataType(cLayer->m_BasicParameters.m_InputToForgetWeights->GetTensorInfo(), dataType);
465 const TensorInfo& inputToCellWeights
466 = OverrideDataType(cLayer->m_BasicParameters.m_InputToCellWeights->GetTensorInfo(), dataType);
467 const TensorInfo& inputToOutputWeights
468 = OverrideDataType(cLayer->m_BasicParameters.m_InputToOutputWeights->GetTensorInfo(), dataType);
469 const TensorInfo& recurrentToForgetWeights
470 = OverrideDataType(cLayer->m_BasicParameters.m_RecurrentToForgetWeights->GetTensorInfo(), dataType);
471 const TensorInfo& recurrentToCellWeights
472 = OverrideDataType(cLayer->m_BasicParameters.m_RecurrentToCellWeights->GetTensorInfo(), dataType);
473 const TensorInfo& recurrentToOutputWeights
474 = OverrideDataType(cLayer->m_BasicParameters.m_RecurrentToOutputWeights->GetTensorInfo(), dataType);
475 const TensorInfo& forgetGateBias
476 = OverrideDataType(cLayer->m_BasicParameters.m_ForgetGateBias->GetTensorInfo(), dataType);
477 const TensorInfo& cellBias
478 = OverrideDataType(cLayer->m_BasicParameters.m_CellBias->GetTensorInfo(), dataType);
479 const TensorInfo& outputGateBias
480 = OverrideDataType(cLayer->m_BasicParameters.m_OutputGateBias->GetTensorInfo(), dataType);
481
Jan Eilersd01a83c2019-07-03 18:20:40 +0100482 LstmInputParamsInfo paramsInfo;
telsoa01c577f2c2018-08-31 09:22:23 +0100483
Jan Eilersd01a83c2019-07-03 18:20:40 +0100484 paramsInfo.m_InputToForgetWeights = &inputToForgetWeights;
485 paramsInfo.m_InputToCellWeights = &inputToCellWeights;
486 paramsInfo.m_InputToOutputWeights = &inputToOutputWeights;
487 paramsInfo.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
488 paramsInfo.m_RecurrentToCellWeights = &recurrentToCellWeights;
489 paramsInfo.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
490 paramsInfo.m_ForgetGateBias = &forgetGateBias;
491 paramsInfo.m_CellBias = &cellBias;
492 paramsInfo.m_OutputGateBias = &outputGateBias;
493
494
495 // Optional parameters
telsoa01c577f2c2018-08-31 09:22:23 +0100496 TensorInfo optInputToInputWeights;
497 TensorInfo optRecurrentToInputWeights;
498 TensorInfo optCellToInputWeights;
499 TensorInfo optInputGateBias;
500 TensorInfo optProjectionWeights;
501 TensorInfo optProjectionBias;
502 TensorInfo optCellToForgetWeights;
503 TensorInfo optCellToOutputWeights;
Jan Eilers38e05bd2019-06-26 13:10:09 +0100504 TensorInfo optInputLayerNormWeights;
505 TensorInfo optForgetLayerNormWeights;
506 TensorInfo optCellLayerNormWeights;
507 TensorInfo optOutputLayerNormWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100508
509 if(!descriptor.m_CifgEnabled)
510 {
511 optInputToInputWeights =
512 OverrideDataType(cLayer->m_CifgParameters.m_InputToInputWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100513 paramsInfo.m_InputToInputWeights = &optInputToInputWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100514
515 optRecurrentToInputWeights =
516 OverrideDataType(cLayer->m_CifgParameters.m_RecurrentToInputWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100517 paramsInfo.m_RecurrentToInputWeights = &optRecurrentToInputWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100518 if (cLayer->m_CifgParameters.m_CellToInputWeights != nullptr)
519 {
520 optCellToInputWeights =
521 OverrideDataType(cLayer->m_CifgParameters.m_CellToInputWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100522 paramsInfo.m_CellToInputWeights = &optCellToInputWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100523 }
524 optInputGateBias =
525 OverrideDataType(cLayer->m_CifgParameters.m_InputGateBias->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100526 paramsInfo.m_InputGateBias = &optInputGateBias;
telsoa01c577f2c2018-08-31 09:22:23 +0100527 }
528
529 if(descriptor.m_ProjectionEnabled)
530 {
531 optProjectionWeights =
532 OverrideDataType(cLayer->m_ProjectionParameters.m_ProjectionWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100533 paramsInfo.m_ProjectionWeights = &optProjectionWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100534 if (cLayer->m_ProjectionParameters.m_ProjectionBias != nullptr)
535 {
536 optProjectionBias =
537 OverrideDataType(cLayer->m_ProjectionParameters.m_ProjectionBias->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100538 paramsInfo.m_ProjectionBias = &optProjectionBias;
telsoa01c577f2c2018-08-31 09:22:23 +0100539 }
540 }
541
542 if(descriptor.m_PeepholeEnabled)
543 {
544 optCellToForgetWeights =
545 OverrideDataType(cLayer->m_PeepholeParameters.m_CellToForgetWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100546 paramsInfo.m_CellToForgetWeights = &optCellToForgetWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100547 optCellToOutputWeights =
548 OverrideDataType(cLayer->m_PeepholeParameters.m_CellToOutputWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100549 paramsInfo.m_CellToOutputWeights = &optCellToOutputWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100550 }
551
Jan Eilers38e05bd2019-06-26 13:10:09 +0100552 if(descriptor.m_LayerNormEnabled)
553 {
Ferran Balaguere30c16e2019-07-24 17:03:45 +0100554 if (!descriptor.m_CifgEnabled)
555 {
556 optInputLayerNormWeights = OverrideDataType(
557 cLayer->m_LayerNormParameters.m_InputLayerNormWeights->GetTensorInfo(), dataType);
558 paramsInfo.m_InputLayerNormWeights = &optInputLayerNormWeights;
559 }
Jan Eilers38e05bd2019-06-26 13:10:09 +0100560
561 optForgetLayerNormWeights = OverrideDataType(
562 cLayer->m_LayerNormParameters.m_ForgetLayerNormWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100563 paramsInfo.m_ForgetLayerNormWeights = &optForgetLayerNormWeights;
Jan Eilers38e05bd2019-06-26 13:10:09 +0100564
565 optCellLayerNormWeights = OverrideDataType(
566 cLayer->m_LayerNormParameters.m_CellLayerNormWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100567 paramsInfo.m_CellLayerNormWeights = &optCellLayerNormWeights;
Jan Eilers38e05bd2019-06-26 13:10:09 +0100568
569 optOutputLayerNormWeights = OverrideDataType(
570 cLayer->m_LayerNormParameters.m_OutputLayerNormWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100571 paramsInfo.m_OutputLayerNormWeights = &optOutputLayerNormWeights;
Jan Eilers38e05bd2019-06-26 13:10:09 +0100572 }
573
David Beck33f0ae02018-10-18 15:13:56 +0100574 result = layerSupportObject->IsLstmSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100575 input,
576 outputStateIn,
577 cellStateIn,
578 scratchBuffer,
579 outputStateOut,
580 cellStateOut,
581 output,
582 descriptor,
Jan Eilersd01a83c2019-07-03 18:20:40 +0100583 paramsInfo,
584 reason);
telsoa014fcda012018-03-09 14:13:49 +0000585 break;
586 }
Nattapat Chaimanowong5a4304a2018-11-28 10:44:37 +0000587 case LayerType::Maximum:
588 {
589 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
590 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
591 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
592
593 result = layerSupportObject->IsMaximumSupported(OverrideDataType(input0, dataType),
594 OverrideDataType(input1, dataType),
595 OverrideDataType(output, dataType),
596 reason);
597 break;
598 }
narpra01b89b05f2019-01-16 09:53:09 +0000599 case LayerType::MemCopy:
600 {
601 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
602 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Matteo Martincigh992d6dc2019-01-10 17:34:20 +0000603
narpra01b89b05f2019-01-16 09:53:09 +0000604 result = layerSupportObject->IsMemCopySupported(OverrideDataType(input, dataType),
605 OverrideDataType(output, dataType),
606 reason);
607 break;
608 }
Derek Lambertif674aa02019-08-01 15:56:25 +0100609 case LayerType::MemImport:
610 {
611 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
612 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
613
614 result = layerSupportObject->IsMemImportSupported(OverrideDataType(input, dataType),
615 OverrideDataType(output, dataType),
616 reason);
617 break;
618 }
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +0100619 case LayerType::Merge:
620 {
621 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
622 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
623 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
624
625 result = layerSupportObject->IsMergeSupported(OverrideDataType(input0, dataType),
626 OverrideDataType(input1, dataType),
627 OverrideDataType(output, dataType),
628 reason);
629 break;
630 }
Jim Flynne242f2d2019-05-22 14:24:13 +0100631 case LayerType::Concat:
telsoa014fcda012018-03-09 14:13:49 +0000632 {
Jim Flynne242f2d2019-05-22 14:24:13 +0100633 auto cLayer = boost::polymorphic_downcast<const ConcatLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000634
telsoa01c577f2c2018-08-31 09:22:23 +0100635 // Get vector of all inputs.
636 auto getTensorInfo = [&dataType](const InputSlot& slot)
telsoa014fcda012018-03-09 14:13:49 +0000637 {
telsoa01c577f2c2018-08-31 09:22:23 +0100638 return OverrideDataType(slot.GetConnectedOutputSlot()->GetTensorInfo(), dataType);
telsoa014fcda012018-03-09 14:13:49 +0000639 };
telsoa01c577f2c2018-08-31 09:22:23 +0100640 auto beginI = boost::make_transform_iterator(layer.GetInputSlots().begin(), getTensorInfo);
641 auto endI = boost::make_transform_iterator(layer.GetInputSlots().end(), getTensorInfo);
642 std::vector<TensorInfo> inputs(beginI, endI);
telsoa014fcda012018-03-09 14:13:49 +0000643
telsoa01c577f2c2018-08-31 09:22:23 +0100644 auto getTensorInfoPtr = [](const TensorInfo& info)
645 {
646 return &info;
647 };
648 auto beginPtr = boost::make_transform_iterator(inputs.begin(), getTensorInfoPtr);
649 auto endPtr = boost::make_transform_iterator(inputs.end(), getTensorInfoPtr);
650 std::vector<const TensorInfo*> inputPtrs(beginPtr, endPtr);
telsoa014fcda012018-03-09 14:13:49 +0000651
Nikhil Raj8599a412018-11-19 14:51:07 +0000652 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
653
Jim Flynne242f2d2019-05-22 14:24:13 +0100654 result = layerSupportObject->IsConcatSupported(inputPtrs, output, cLayer->GetParameters(), reason);
655
656
telsoa014fcda012018-03-09 14:13:49 +0000657 break;
658 }
659 case LayerType::Multiplication:
660 {
661 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
662 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100663 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100664 result = layerSupportObject->IsMultiplicationSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100665 OverrideDataType(input0, dataType),
666 OverrideDataType(input1, dataType),
667 OverrideDataType(output, dataType),
David Beck33f0ae02018-10-18 15:13:56 +0100668 reason);
telsoa014fcda012018-03-09 14:13:49 +0000669 break;
670 }
671 case LayerType::Normalization:
672 {
673 auto cLayer = boost::polymorphic_downcast<const NormalizationLayer*>(&layer);
674 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
675 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100676 result = layerSupportObject->IsNormalizationSupported(OverrideDataType(input, dataType),
677 OverrideDataType(output, dataType),
678 cLayer->GetParameters(),
679 reason);
telsoa014fcda012018-03-09 14:13:49 +0000680 break;
681 }
682 case LayerType::Output:
683 {
684 const TensorInfo& output = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100685 result = layerSupportObject->IsOutputSupported(OverrideDataType(output, dataType), reason);
telsoa014fcda012018-03-09 14:13:49 +0000686 break;
687 }
688 case LayerType::Permute:
689 {
690 auto cLayer = boost::polymorphic_downcast<const PermuteLayer*>(&layer);
691 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
692 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100693 result = layerSupportObject->IsPermuteSupported(OverrideDataType(input, dataType),
694 OverrideDataType(output, dataType),
695 cLayer->GetParameters(),
696 reason);
telsoa014fcda012018-03-09 14:13:49 +0000697 break;
698 }
Mohamed Nour Abouelseoud5662c202018-09-24 13:30:09 +0100699 case LayerType::Pad:
700 {
701 auto cLayer = boost::polymorphic_downcast<const PadLayer*>(&layer);
702 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
703 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100704 result = layerSupportObject->IsPadSupported(
Mohamed Nour Abouelseoud5662c202018-09-24 13:30:09 +0100705 OverrideDataType(input, dataType),
706 OverrideDataType(output, dataType),
707 cLayer->GetParameters(),
David Beck33f0ae02018-10-18 15:13:56 +0100708 reason);
Mohamed Nour Abouelseoud5662c202018-09-24 13:30:09 +0100709 break;
710 }
telsoa014fcda012018-03-09 14:13:49 +0000711 case LayerType::Pooling2d:
712 {
713 auto cLayer = boost::polymorphic_downcast<const Pooling2dLayer*>(&layer);
714 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
715 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100716 result = layerSupportObject->IsPooling2dSupported(OverrideDataType(input, dataType),
717 OverrideDataType(output, dataType),
718 cLayer->GetParameters(),
719 reason);
telsoa014fcda012018-03-09 14:13:49 +0000720 break;
721 }
Matteo Martincigh49124022019-01-11 13:25:59 +0000722 case LayerType::PreCompiled:
723 {
724 auto cLayer = boost::polymorphic_downcast<const PreCompiledLayer*>(&layer);
725 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
726 result = layerSupportObject->IsPreCompiledSupported(OverrideDataType(input, dataType),
727 cLayer->GetParameters(),
728 reason);
729 break;
730 }
Derek Lambertia9cca6a2019-03-25 15:41:58 +0000731 case LayerType::Quantize:
732 {
733 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
734 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
735 result = layerSupportObject->IsQuantizeSupported(input, output, reason);
736 break;
737 }
James Conroyee18dc82019-07-17 11:27:46 +0100738 case LayerType::QuantizedLstm:
739 {
740 auto cLayer = boost::polymorphic_downcast<const QuantizedLstmLayer*>(&layer);
741
742 // Inputs
Ferran Balaguer737d9ff2019-08-01 09:58:08 +0100743 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
744 const TensorInfo& previousCellStateIn = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
745 const TensorInfo& previousOutputIn = layer.GetInputSlot(2).GetConnection()->GetTensorInfo();
James Conroyee18dc82019-07-17 11:27:46 +0100746
747 // Outputs
Ferran Balaguer737d9ff2019-08-01 09:58:08 +0100748 const TensorInfo& cellStateOut = layer.GetOutputSlot(0).GetTensorInfo();
749 const TensorInfo& output = layer.GetOutputSlot(1).GetTensorInfo();
James Conroyee18dc82019-07-17 11:27:46 +0100750
751 // QuantizedLstm parameters
James Conroyee18dc82019-07-17 11:27:46 +0100752 QuantizedLstmInputParamsInfo paramsInfo;
753
Ferran Balaguer737d9ff2019-08-01 09:58:08 +0100754 paramsInfo.m_InputToInputWeights =
755 &cLayer->m_QuantizedLstmParameters.m_InputToInputWeights->GetTensorInfo();
756 paramsInfo.m_InputToForgetWeights =
757 &cLayer->m_QuantizedLstmParameters.m_InputToForgetWeights->GetTensorInfo();
758 paramsInfo.m_InputToCellWeights =
759 &cLayer->m_QuantizedLstmParameters.m_InputToCellWeights->GetTensorInfo();
760 paramsInfo.m_InputToOutputWeights =
761 &cLayer->m_QuantizedLstmParameters.m_InputToOutputWeights->GetTensorInfo();
James Conroyee18dc82019-07-17 11:27:46 +0100762
Ferran Balaguer737d9ff2019-08-01 09:58:08 +0100763 paramsInfo.m_RecurrentToInputWeights =
764 &cLayer->m_QuantizedLstmParameters.m_RecurrentToInputWeights->GetTensorInfo();
765 paramsInfo.m_RecurrentToForgetWeights =
766 &cLayer->m_QuantizedLstmParameters.m_RecurrentToForgetWeights->GetTensorInfo();
767 paramsInfo.m_RecurrentToCellWeights =
768 &cLayer->m_QuantizedLstmParameters.m_RecurrentToCellWeights->GetTensorInfo();
769 paramsInfo.m_RecurrentToOutputWeights =
770 &cLayer->m_QuantizedLstmParameters.m_RecurrentToOutputWeights->GetTensorInfo();
James Conroyee18dc82019-07-17 11:27:46 +0100771
Ferran Balaguer737d9ff2019-08-01 09:58:08 +0100772 paramsInfo.m_InputGateBias =
773 &cLayer->m_QuantizedLstmParameters.m_InputGateBias->GetTensorInfo();
774 paramsInfo.m_ForgetGateBias =
775 &cLayer->m_QuantizedLstmParameters.m_ForgetGateBias->GetTensorInfo();
776 paramsInfo.m_CellBias =
777 &cLayer->m_QuantizedLstmParameters.m_CellBias->GetTensorInfo();
778 paramsInfo.m_OutputGateBias =
779 &cLayer->m_QuantizedLstmParameters.m_OutputGateBias->GetTensorInfo();;
James Conroyee18dc82019-07-17 11:27:46 +0100780
781 result = layerSupportObject->IsQuantizedLstmSupported(input,
782 previousCellStateIn,
783 previousOutputIn,
784 cellStateOut,
785 output,
786 paramsInfo,
787 reason);
788 break;
789 }
Francis Murtaghe7a86a42018-08-29 12:42:10 +0100790 case LayerType::Division:
791 {
792 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
793 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
794 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100795 result = layerSupportObject->IsDivisionSupported(
Francis Murtaghe7a86a42018-08-29 12:42:10 +0100796 OverrideDataType(input0, dataType),
797 OverrideDataType(input1, dataType),
798 OverrideDataType(output, dataType),
David Beck33f0ae02018-10-18 15:13:56 +0100799 reason);
Francis Murtaghe7a86a42018-08-29 12:42:10 +0100800 break;
801 }
telsoa014fcda012018-03-09 14:13:49 +0000802 case LayerType::Reshape:
803 {
Matteo Martincigh992d6dc2019-01-10 17:34:20 +0000804 auto cLayer = boost::polymorphic_downcast<const ReshapeLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000805 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
Kevin Maya023c402019-12-12 17:28:05 +0000806 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Matteo Martincigh992d6dc2019-01-10 17:34:20 +0000807 result = layerSupportObject->IsReshapeSupported(OverrideDataType(input, dataType),
Kevin Maya023c402019-12-12 17:28:05 +0000808 OverrideDataType(output, dataType),
Matteo Martincigh992d6dc2019-01-10 17:34:20 +0000809 cLayer->GetParameters(),
810 reason);
telsoa014fcda012018-03-09 14:13:49 +0000811 break;
812 }
Teresa Charlina9075df2019-06-27 15:41:57 +0100813 case LayerType::Resize:
814 {
815 auto cLayer = boost::polymorphic_downcast<const ResizeLayer*>(&layer);
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +0100816 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
Teresa Charlina9075df2019-06-27 15:41:57 +0100817 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
818 result = layerSupportObject->IsResizeSupported(OverrideDataType(input, dataType),
819 OverrideDataType(output, dataType),
820 cLayer->GetParameters(),
821 reason);
822 break;
823 }
Aron Virginas-Tar636ab402019-09-16 14:27:45 +0100824 case LayerType::Slice:
825 {
826 auto cLayer = boost::polymorphic_downcast<const SliceLayer*>(&layer);
827
828 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
829 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
830
831 result = layerSupportObject->IsSliceSupported(OverrideDataType(input, dataType),
832 OverrideDataType(output, dataType),
833 cLayer->GetParameters(),
834 reason);
835 break;
836 }
telsoa014fcda012018-03-09 14:13:49 +0000837 case LayerType::Softmax:
838 {
839 auto cLayer = boost::polymorphic_downcast<const SoftmaxLayer*>(&layer);
840 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100841 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100842 result = layerSupportObject->IsSoftmaxSupported(OverrideDataType(input, dataType),
843 OverrideDataType(output, dataType),
844 cLayer->GetParameters(),
845 reason);
telsoa014fcda012018-03-09 14:13:49 +0000846 break;
847 }
Nattapat Chaimanowong207ef9a2018-11-02 10:57:25 +0000848 case LayerType::SpaceToBatchNd:
849 {
850 auto cLayer = boost::polymorphic_downcast<const SpaceToBatchNdLayer*>(&layer);
851 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
852 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
853 result = layerSupportObject->IsSpaceToBatchNdSupported(OverrideDataType(input, dataType),
854 OverrideDataType(output, dataType),
855 cLayer->GetParameters(),
856 reason);
857 break;
858 }
Aron Virginas-Tar972af152019-06-11 14:14:03 +0100859 case LayerType::SpaceToDepth:
860 {
861 auto cLayer = boost::polymorphic_downcast<const SpaceToDepthLayer*>(&layer);
862
863 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
864 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
865
866 result = layerSupportObject->IsSpaceToDepthSupported(OverrideDataType(input, dataType),
867 OverrideDataType(output, dataType),
868 cLayer->GetParameters(),
869 reason);
870 break;
871 }
telsoa014fcda012018-03-09 14:13:49 +0000872 case LayerType::Splitter:
873 {
874 auto cLayer = boost::polymorphic_downcast<const SplitterLayer*>(&layer);
875 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
Narumol Prangnawarat15eb5832019-05-20 15:31:05 +0100876
877 // Get vector of all outputs.
878 auto getTensorInfo = [&dataType](const OutputSlot& slot)
879 {
880 return OverrideDataType(slot.GetTensorInfo(), dataType);
881 };
882 auto beginI = boost::make_transform_iterator(layer.GetOutputSlots().begin(), getTensorInfo);
883 auto endI = boost::make_transform_iterator(layer.GetOutputSlots().end(), getTensorInfo);
884 std::vector<TensorInfo> outputs(beginI, endI);
885
886 const std::vector<std::reference_wrapper<TensorInfo>> outputPtrs(outputs.begin(), outputs.end());
887
David Beck33f0ae02018-10-18 15:13:56 +0100888 result = layerSupportObject->IsSplitterSupported(OverrideDataType(input, dataType),
Narumol Prangnawarat15eb5832019-05-20 15:31:05 +0100889 outputPtrs,
David Beck33f0ae02018-10-18 15:13:56 +0100890 cLayer->GetParameters(),
891 reason);
telsoa014fcda012018-03-09 14:13:49 +0000892 break;
893 }
Matthew Jackson2b8c1da2019-07-04 14:59:16 +0100894 case LayerType::Stack:
895 {
896 auto cLayer = boost::polymorphic_downcast<const StackLayer*>(&layer);
897
898 // Get vector of all inputs.
899 auto getTensorInfo = [&dataType](const InputSlot& slot)
900 {
901 return OverrideDataType(slot.GetConnectedOutputSlot()->GetTensorInfo(), dataType);
902 };
903 auto beginI = boost::make_transform_iterator(layer.GetInputSlots().begin(), getTensorInfo);
904 auto endI = boost::make_transform_iterator(layer.GetInputSlots().end(), getTensorInfo);
905 std::vector<TensorInfo> inputs(beginI, endI);
906
907 auto getTensorInfoPtr = [](const TensorInfo& info)
908 {
909 return &info;
910 };
911 auto beginPtr = boost::make_transform_iterator(inputs.begin(), getTensorInfoPtr);
912 auto endPtr = boost::make_transform_iterator(inputs.end(), getTensorInfoPtr);
913 std::vector<const TensorInfo*> inputPtrs(beginPtr, endPtr);
914
915 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
916
917 result = layerSupportObject->IsStackSupported(inputPtrs, output, cLayer->GetParameters(), reason);
918
919 break;
920 }
Derek Lamberti013c3902019-10-21 10:46:16 +0100921 case LayerType::StandIn:
922 {
923 auto cLayer = boost::polymorphic_downcast<const StandInLayer*>(&layer);
924
925 // Get vector of all inputs.
926 auto getTensorInfoIn = [&dataType](const InputSlot& slot)
927 {
928 return OverrideDataType(slot.GetConnectedOutputSlot()->GetTensorInfo(), dataType);
929 };
930 auto getTensorInfoOut = [&dataType](const OutputSlot& slot)
931 {
932 return OverrideDataType(slot.GetTensorInfo(), dataType);
933 };
934 auto beginI = boost::make_transform_iterator(layer.GetInputSlots().begin(), getTensorInfoIn);
935 auto endI = boost::make_transform_iterator(layer.GetInputSlots().end(), getTensorInfoIn);
936 std::vector<TensorInfo> inputs(beginI, endI);
937
938 auto beginO = boost::make_transform_iterator(layer.GetOutputSlots().begin(), getTensorInfoOut);
939 auto endO = boost::make_transform_iterator(layer.GetOutputSlots().end(), getTensorInfoOut);
940 std::vector<TensorInfo> outputs(beginO, endO);
941
942
943 auto getTensorInfoPtr = [](const TensorInfo& info)
944 {
945 return &info;
946 };
947 auto beginPtrI = boost::make_transform_iterator(inputs.begin(), getTensorInfoPtr);
948 auto endPtrI = boost::make_transform_iterator(inputs.end(), getTensorInfoPtr);
949 std::vector<const TensorInfo*> inputPtrs(beginPtrI, endPtrI);
950
951 auto beginPtrO = boost::make_transform_iterator(outputs.begin(), getTensorInfoPtr);
952 auto endPtrO = boost::make_transform_iterator(outputs.end(), getTensorInfoPtr);
953 std::vector<const TensorInfo*> outputPtrs(beginPtrO, endPtrO);
954
955
956 result = layerSupportObject->IsStandInSupported(inputPtrs,
957 outputPtrs,
958 cLayer->GetParameters(),
959 reason);
960 break;
961 }
Conor Kennedy430b5d82018-11-14 15:28:28 +0000962 case LayerType::StridedSlice:
963 {
964 auto cLayer = boost::polymorphic_downcast<const StridedSliceLayer*>(&layer);
965 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
966 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
967 result = layerSupportObject->IsStridedSliceSupported(OverrideDataType(input, dataType),
968 OverrideDataType(output, dataType),
969 cLayer->GetParameters(),
970 reason);
971 break;
972 }
David Beckc2044fe2018-09-05 15:00:38 +0100973 case LayerType::Subtraction:
974 {
975 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
976 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
977 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100978 result = layerSupportObject->IsSubtractionSupported(
David Beckc2044fe2018-09-05 15:00:38 +0100979 OverrideDataType(input0, dataType),
980 OverrideDataType(input1, dataType),
981 OverrideDataType(output, dataType),
David Beck33f0ae02018-10-18 15:13:56 +0100982 reason);
David Beckc2044fe2018-09-05 15:00:38 +0100983 break;
984 }
Sadik Armaganeff363d2019-04-05 15:25:46 +0100985 case LayerType::Switch:
986 {
987 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
988 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
989 const TensorInfo& output0 = layer.GetOutputSlot(0).GetTensorInfo();
990 const TensorInfo& output1 = layer.GetOutputSlot(1).GetTensorInfo();
991 result = layerSupportObject->IsSwitchSupported(OverrideDataType(input0, dataType),
992 OverrideDataType(input1, dataType),
993 OverrideDataType(output0, dataType),
994 OverrideDataType(output1, dataType),
995 reason);
996 break;
997 }
narpra0132b90462018-09-13 11:07:48 +0100998 case LayerType::Mean:
999 {
1000 auto cLayer = boost::polymorphic_downcast<const MeanLayer*>(&layer);
1001 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1002 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +01001003 result = layerSupportObject->IsMeanSupported(
narpra0132b90462018-09-13 11:07:48 +01001004 OverrideDataType(input, dataType),
1005 OverrideDataType(output, dataType),
1006 cLayer->GetParameters(),
David Beck33f0ae02018-10-18 15:13:56 +01001007 reason);
narpra0132b90462018-09-13 11:07:48 +01001008 break;
1009 }
kevmay0190539692018-11-29 08:40:19 +00001010 case LayerType::Minimum:
1011 {
1012 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1013 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
1014 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
1015 result = layerSupportObject->IsMinimumSupported(OverrideDataType(input0, dataType),
1016 OverrideDataType(input1, dataType),
1017 OverrideDataType(output, dataType),
1018 reason);
1019 break;
1020 }
Matteo Martincigh0e406ee2019-06-12 15:42:18 +01001021 case LayerType::Prelu:
1022 {
1023 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1024 const TensorInfo& alpha = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
1025 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
1026 result = layerSupportObject->IsPreluSupported(OverrideDataType(input, dataType),
1027 OverrideDataType(alpha, dataType),
1028 OverrideDataType(output, dataType),
1029 reason);
1030 break;
1031 }
Mike Kellyc9ea45a2020-02-28 18:11:58 +00001032 case LayerType::Transpose:
1033 {
1034 auto cLayer = boost::polymorphic_downcast<const TransposeLayer*>(&layer);
1035 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1036 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
1037 result = layerSupportObject->IsTransposeSupported(OverrideDataType(input, dataType),
1038 OverrideDataType(output, dataType),
1039 cLayer->GetParameters(),
1040 reason);
1041 break;
1042 }
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01001043 case LayerType::TransposeConvolution2d:
1044 {
1045 auto cLayer = boost::polymorphic_downcast<const TransposeConvolution2dLayer*>(&layer);
1046
1047 const TensorInfo input = OverrideDataType(layer.GetInputSlot(0).GetConnection()->GetTensorInfo(),
1048 dataType);
1049 const TensorInfo output = OverrideDataType(layer.GetOutputSlot(0).GetTensorInfo(), dataType);
1050
1051 const TransposeConvolution2dDescriptor& descriptor = cLayer->GetParameters();
1052
1053 Optional<TensorInfo> biases;
1054 if (descriptor.m_BiasEnabled)
1055 {
1056 BOOST_ASSERT(cLayer->m_Bias.get() != nullptr);
1057 biases = OverrideDataType(cLayer->m_Bias->GetTensorInfo(),
1058 GetBiasTypeFromWeightsType(dataType));
1059 }
1060
1061 BOOST_ASSERT(cLayer->m_Weight.get() != nullptr);
1062 const TensorInfo weights = OverrideDataType(cLayer->m_Weight->GetTensorInfo(), dataType);
1063
1064 result = layerSupportObject->IsTransposeConvolution2dSupported(input,
1065 output,
1066 descriptor,
1067 weights,
1068 biases,
1069 reason);
1070
1071 break;
1072 }
telsoa014fcda012018-03-09 14:13:49 +00001073 default:
1074 {
1075 BOOST_ASSERT_MSG(false, "WorkloadFactory did not recognise type of layer.");
David Beck33f0ae02018-10-18 15:13:56 +01001076 reason.value() = "Unrecognised layer type";
telsoa014fcda012018-03-09 14:13:49 +00001077 result = false;
1078 break;
1079 }
1080 }
telsoa014fcda012018-03-09 14:13:49 +00001081 return result;
1082}
1083
David Beckdcb751f2018-10-03 11:42:42 +01001084bool IWorkloadFactory::IsLayerSupported(const IConnectableLayer& connectableLayer,
David Beck29c75de2018-10-23 13:35:58 +01001085 Optional<DataType> dataType,
telsoa01c577f2c2018-08-31 09:22:23 +01001086 std::string& outReasonIfUnsupported)
telsoa014fcda012018-03-09 14:13:49 +00001087{
David Beckdcb751f2018-10-03 11:42:42 +01001088 auto layer = boost::polymorphic_downcast<const Layer*>(&connectableLayer);
David Beck33f0ae02018-10-18 15:13:56 +01001089 return IsLayerSupported(layer->GetBackendId(), connectableLayer, dataType, outReasonIfUnsupported);
telsoa014fcda012018-03-09 14:13:49 +00001090}
1091
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001092// Default Implementations
Derek Lamberti901ea112019-12-10 22:07:09 +00001093std::unique_ptr<IWorkload> IWorkloadFactory::CreateAbs(const AbsQueueDescriptor& /*descriptor*/,
1094 const WorkloadInfo& /*info*/) const
Kevin May868eb142019-09-04 17:29:31 +01001095{
1096 return std::unique_ptr<IWorkload>();
1097}
1098
Derek Lamberti901ea112019-12-10 22:07:09 +00001099std::unique_ptr<IWorkload> IWorkloadFactory::CreateActivation(const ActivationQueueDescriptor& /*descriptor*/,
1100 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001101{
1102 return std::unique_ptr<IWorkload>();
1103}
1104
Derek Lamberti901ea112019-12-10 22:07:09 +00001105std::unique_ptr<IWorkload> IWorkloadFactory::CreateAddition(const AdditionQueueDescriptor& /*descriptor*/,
1106 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001107{
1108 return std::unique_ptr<IWorkload>();
1109}
1110
Derek Lamberti901ea112019-12-10 22:07:09 +00001111std::unique_ptr<IWorkload> IWorkloadFactory::CreateArgMinMax(const ArgMinMaxQueueDescriptor& /*descriptor*/,
1112 const WorkloadInfo& /*info*/) const
Nikhil Rajee391d52019-09-05 17:50:44 +01001113{
1114 return std::unique_ptr<IWorkload>();
1115}
1116
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001117std::unique_ptr<IWorkload> IWorkloadFactory::CreateBatchNormalization(
Derek Lamberti901ea112019-12-10 22:07:09 +00001118 const BatchNormalizationQueueDescriptor& /*descriptor*/, const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001119{
1120 return std::unique_ptr<IWorkload>();
1121}
1122
Derek Lamberti901ea112019-12-10 22:07:09 +00001123std::unique_ptr<IWorkload> IWorkloadFactory::CreateBatchToSpaceNd(const BatchToSpaceNdQueueDescriptor& /*desc*/,
1124 const WorkloadInfo& /*Info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001125{
1126 return std::unique_ptr<IWorkload>();
1127}
1128
Derek Lamberti901ea112019-12-10 22:07:09 +00001129std::unique_ptr<IWorkload> IWorkloadFactory::CreateComparison(const ComparisonQueueDescriptor& /*descriptor*/,
1130 const WorkloadInfo& /*info*/) const
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001131{
1132 return std::unique_ptr<IWorkload>();
1133}
1134
Derek Lamberti901ea112019-12-10 22:07:09 +00001135std::unique_ptr<IWorkload> IWorkloadFactory::CreateConcat(const ConcatQueueDescriptor& /*descriptor*/,
1136 const WorkloadInfo& /*info*/) const
Jim Flynn4ed6c832019-05-20 11:02:46 +01001137{
1138 return std::unique_ptr<IWorkload>();
1139}
1140
Derek Lamberti901ea112019-12-10 22:07:09 +00001141std::unique_ptr<IWorkload> IWorkloadFactory::CreateConstant(const ConstantQueueDescriptor& /*descriptor*/,
1142 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001143{
1144 return std::unique_ptr<IWorkload>();
1145}
1146
Derek Lamberti901ea112019-12-10 22:07:09 +00001147std::unique_ptr<IWorkload> IWorkloadFactory::CreateConvertFp16ToFp32(const ConvertFp16ToFp32QueueDescriptor& /*desc*/,
1148 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001149{
1150 return std::unique_ptr<IWorkload>();
1151}
1152
Derek Lamberti901ea112019-12-10 22:07:09 +00001153std::unique_ptr<IWorkload> IWorkloadFactory::CreateConvertFp32ToFp16(const ConvertFp32ToFp16QueueDescriptor& /*desc*/,
1154 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001155{
1156 return std::unique_ptr<IWorkload>();
1157}
1158
Derek Lamberti901ea112019-12-10 22:07:09 +00001159std::unique_ptr<IWorkload> IWorkloadFactory::CreateConvolution2d(const Convolution2dQueueDescriptor& /*descriptor*/,
1160 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001161{
1162 return std::unique_ptr<IWorkload>();
1163}
1164
Derek Lamberti901ea112019-12-10 22:07:09 +00001165std::unique_ptr<IWorkload> IWorkloadFactory::CreateDebug(const DebugQueueDescriptor& /*descriptor*/,
1166 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001167{
1168 return std::unique_ptr<IWorkload>();
1169}
1170
Derek Lamberti901ea112019-12-10 22:07:09 +00001171std::unique_ptr<IWorkload> IWorkloadFactory::CreateDepthToSpace(const DepthToSpaceQueueDescriptor& /*descriptor*/,
1172 const WorkloadInfo& /*info*/) const
Aron Virginas-Tardd6247f2019-09-19 14:31:17 +01001173{
1174 return std::unique_ptr<IWorkload>();
1175}
1176
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001177std::unique_ptr<IWorkload> IWorkloadFactory::CreateDepthwiseConvolution2d(
Derek Lamberti901ea112019-12-10 22:07:09 +00001178 const DepthwiseConvolution2dQueueDescriptor& /*descriptor*/, const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001179{
1180 return std::unique_ptr<IWorkload>();
1181}
1182
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00001183std::unique_ptr<IWorkload> IWorkloadFactory::CreateDequantize(
Derek Lamberti901ea112019-12-10 22:07:09 +00001184 const DequantizeQueueDescriptor& /*descriptor*/, const WorkloadInfo& /*info*/) const
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00001185{
1186 return std::unique_ptr<IWorkload>();
1187}
1188
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001189std::unique_ptr<IWorkload> IWorkloadFactory::CreateDetectionPostProcess(
Derek Lamberti901ea112019-12-10 22:07:09 +00001190 const DetectionPostProcessQueueDescriptor& /*descriptor*/, const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001191{
1192 return std::unique_ptr<IWorkload>();
1193}
1194
Derek Lamberti901ea112019-12-10 22:07:09 +00001195std::unique_ptr<IWorkload> IWorkloadFactory::CreateDivision(const DivisionQueueDescriptor& /*descriptor*/,
1196 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001197{
1198 return std::unique_ptr<IWorkload>();
1199}
1200
josh minor4a3c6102020-01-06 16:40:46 -06001201std::unique_ptr<IWorkload> IWorkloadFactory::CreateElementwiseUnary(const ElementwiseUnaryQueueDescriptor& /*desc*/,
1202 const WorkloadInfo& /*info*/) const
1203{
1204 return std::unique_ptr<IWorkload>();
1205}
1206
Derek Lamberti901ea112019-12-10 22:07:09 +00001207std::unique_ptr<IWorkload> IWorkloadFactory::CreateEqual(const EqualQueueDescriptor& /*descriptor*/,
1208 const WorkloadInfo& /*Info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001209{
1210 return std::unique_ptr<IWorkload>();
1211}
1212
Derek Lamberti901ea112019-12-10 22:07:09 +00001213std::unique_ptr<IWorkload> IWorkloadFactory::CreateFakeQuantization(const FakeQuantizationQueueDescriptor& /*desc*/,
1214 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001215{
1216 return std::unique_ptr<IWorkload>();
1217}
1218
Derek Lamberti901ea112019-12-10 22:07:09 +00001219std::unique_ptr<IWorkload> IWorkloadFactory::CreateFloor(const FloorQueueDescriptor& /*descriptor*/,
1220 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001221{
1222 return std::unique_ptr<IWorkload>();
1223}
1224
Derek Lamberti901ea112019-12-10 22:07:09 +00001225std::unique_ptr<IWorkload> IWorkloadFactory::CreateFullyConnected(const FullyConnectedQueueDescriptor& /*descriptor*/,
1226 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001227{
1228 return std::unique_ptr<IWorkload>();
1229}
1230
Derek Lamberti901ea112019-12-10 22:07:09 +00001231std::unique_ptr<IWorkload> IWorkloadFactory::CreateGather(const GatherQueueDescriptor& /*descriptor*/,
1232 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001233{
1234 return std::unique_ptr<IWorkload>();
1235}
1236
Derek Lamberti901ea112019-12-10 22:07:09 +00001237std::unique_ptr<IWorkload> IWorkloadFactory::CreateGreater(const GreaterQueueDescriptor& /*descriptor*/,
1238 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001239{
1240 return std::unique_ptr<IWorkload>();
1241}
1242
Kevin Mayce5045a2019-10-02 14:07:47 +01001243std::unique_ptr<IWorkload> IWorkloadFactory::CreateInstanceNormalization(
Derek Lamberti901ea112019-12-10 22:07:09 +00001244 const InstanceNormalizationQueueDescriptor& /*descriptor*/,
1245 const WorkloadInfo& /*info*/) const
Kevin Mayce5045a2019-10-02 14:07:47 +01001246{
1247 return std::unique_ptr<IWorkload>();
1248}
1249
Derek Lamberti901ea112019-12-10 22:07:09 +00001250std::unique_ptr<IWorkload> IWorkloadFactory::CreateL2Normalization(const L2NormalizationQueueDescriptor& /*desc*/,
1251 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001252{
1253 return std::unique_ptr<IWorkload>();
1254}
1255
Derek Lamberti901ea112019-12-10 22:07:09 +00001256std::unique_ptr<IWorkload> IWorkloadFactory::CreateLogSoftmax(const LogSoftmaxQueueDescriptor& /*descriptor*/,
1257 const WorkloadInfo& /*info*/) const
Aron Virginas-Tarf982dea2019-10-11 14:07:53 +01001258{
1259 return std::unique_ptr<IWorkload>();
1260}
1261
Derek Lamberti901ea112019-12-10 22:07:09 +00001262std::unique_ptr<IWorkload> IWorkloadFactory::CreateLstm(const LstmQueueDescriptor& /*descriptor*/,
1263 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001264{
1265 return std::unique_ptr<IWorkload>();
1266}
1267
Derek Lamberti901ea112019-12-10 22:07:09 +00001268std::unique_ptr<IWorkload> IWorkloadFactory::CreateMaximum(const MaximumQueueDescriptor& /*descriptor*/,
1269 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001270{
1271 return std::unique_ptr<IWorkload>();
1272}
1273
Derek Lamberti901ea112019-12-10 22:07:09 +00001274std::unique_ptr<IWorkload> IWorkloadFactory::CreateMean(const MeanQueueDescriptor& /*descriptor*/,
1275 const WorkloadInfo& /*Info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001276{
1277 return std::unique_ptr<IWorkload>();
1278}
1279
Derek Lamberti901ea112019-12-10 22:07:09 +00001280std::unique_ptr<IWorkload> IWorkloadFactory::CreateMemCopy(const MemCopyQueueDescriptor& /*descriptor*/,
1281 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001282{
1283 return std::unique_ptr<IWorkload>();
1284}
1285
Derek Lamberti901ea112019-12-10 22:07:09 +00001286std::unique_ptr<IWorkload> IWorkloadFactory::CreateMemImport(const MemImportQueueDescriptor& /*descriptor*/,
1287 const WorkloadInfo& /*info*/) const
Derek Lambertif674aa02019-08-01 15:56:25 +01001288{
1289 return std::unique_ptr<IWorkload>();
1290}
1291
Derek Lamberti901ea112019-12-10 22:07:09 +00001292std::unique_ptr<IWorkload> IWorkloadFactory::CreateMerge(const MergeQueueDescriptor& /*descriptor*/,
1293 const WorkloadInfo& /*info*/) const
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01001294{
1295 return std::unique_ptr<IWorkload>();
1296}
1297
Derek Lamberti901ea112019-12-10 22:07:09 +00001298std::unique_ptr<IWorkload> IWorkloadFactory::CreateMerger(const MergerQueueDescriptor& /*descriptor*/,
1299 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001300{
1301 return std::unique_ptr<IWorkload>();
1302}
1303
Derek Lamberti901ea112019-12-10 22:07:09 +00001304std::unique_ptr<IWorkload> IWorkloadFactory::CreateMinimum(const MinimumQueueDescriptor& /*descriptor*/,
1305 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001306{
1307 return std::unique_ptr<IWorkload>();
1308}
1309
Derek Lamberti901ea112019-12-10 22:07:09 +00001310std::unique_ptr<IWorkload> IWorkloadFactory::CreateMultiplication(const MultiplicationQueueDescriptor& /*descriptor*/,
1311 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001312{
1313 return std::unique_ptr<IWorkload>();
1314}
1315
Derek Lamberti901ea112019-12-10 22:07:09 +00001316std::unique_ptr<IWorkload> IWorkloadFactory::CreateNormalization(const NormalizationQueueDescriptor& /*descriptor*/,
1317 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001318{
1319 return std::unique_ptr<IWorkload>();
1320}
1321
Derek Lamberti901ea112019-12-10 22:07:09 +00001322std::unique_ptr<IWorkload> IWorkloadFactory::CreateOutput(const OutputQueueDescriptor& /*descriptor*/,
1323 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001324{
1325 return std::unique_ptr<IWorkload>();
1326}
1327
Derek Lamberti901ea112019-12-10 22:07:09 +00001328std::unique_ptr<IWorkload> IWorkloadFactory::CreatePad(const PadQueueDescriptor& /*descriptor*/,
1329 const WorkloadInfo& /*Info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001330{
1331 return std::unique_ptr<IWorkload>();
1332}
1333
Derek Lamberti901ea112019-12-10 22:07:09 +00001334std::unique_ptr<IWorkload> IWorkloadFactory::CreatePermute(const PermuteQueueDescriptor& /*descriptor*/,
Mike Kellyc9ea45a2020-02-28 18:11:58 +00001335 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001336{
1337 return std::unique_ptr<IWorkload>();
1338}
1339
Derek Lamberti901ea112019-12-10 22:07:09 +00001340std::unique_ptr<IWorkload> IWorkloadFactory::CreatePooling2d(const Pooling2dQueueDescriptor& /*descriptor*/,
1341 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001342{
1343 return std::unique_ptr<IWorkload>();
1344}
1345
Derek Lamberti901ea112019-12-10 22:07:09 +00001346std::unique_ptr<IWorkload> IWorkloadFactory::CreatePreCompiled(const PreCompiledQueueDescriptor& /*descriptor*/,
1347 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001348{
1349 return std::unique_ptr<IWorkload>();
1350}
1351
Derek Lamberti901ea112019-12-10 22:07:09 +00001352std::unique_ptr<IWorkload> IWorkloadFactory::CreatePrelu(const PreluQueueDescriptor &/*descriptor*/,
1353 const WorkloadInfo &/*info*/) const
Matteo Martincigh0e406ee2019-06-12 15:42:18 +01001354{
1355 return std::unique_ptr<IWorkload>();
1356}
1357
Derek Lamberti901ea112019-12-10 22:07:09 +00001358std::unique_ptr<IWorkload> IWorkloadFactory::CreateQuantize(const QuantizeQueueDescriptor& /*descriptor*/,
1359 const WorkloadInfo& /*Info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001360{
1361 return std::unique_ptr<IWorkload>();
1362}
1363
Derek Lamberti901ea112019-12-10 22:07:09 +00001364std::unique_ptr<IWorkload> IWorkloadFactory::CreateQuantizedLstm(const QuantizedLstmQueueDescriptor& /*descriptor*/,
1365 const WorkloadInfo& /*info*/) const
James Conroyee18dc82019-07-17 11:27:46 +01001366{
1367 return std::unique_ptr<IWorkload>();
1368}
1369
Derek Lamberti901ea112019-12-10 22:07:09 +00001370std::unique_ptr<IWorkload> IWorkloadFactory::CreateReshape(const ReshapeQueueDescriptor& /*descriptor*/,
1371 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001372{
1373 return std::unique_ptr<IWorkload>();
1374}
1375
Derek Lamberti901ea112019-12-10 22:07:09 +00001376std::unique_ptr<IWorkload> IWorkloadFactory::CreateResizeBilinear(const ResizeBilinearQueueDescriptor& /*descriptor*/,
1377 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001378{
1379 return std::unique_ptr<IWorkload>();
1380}
1381
Derek Lamberti901ea112019-12-10 22:07:09 +00001382std::unique_ptr<IWorkload> IWorkloadFactory::CreateResize(const ResizeQueueDescriptor& /*descriptor*/,
1383 const WorkloadInfo& /*info*/) const
Teresa Charlina9075df2019-06-27 15:41:57 +01001384{
1385 return std::unique_ptr<IWorkload>();
1386}
1387
Derek Lamberti901ea112019-12-10 22:07:09 +00001388std::unique_ptr<IWorkload> IWorkloadFactory::CreateRsqrt(const RsqrtQueueDescriptor& /*descriptor*/,
1389 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001390{
1391 return std::unique_ptr<IWorkload>();
1392}
1393
Derek Lamberti901ea112019-12-10 22:07:09 +00001394std::unique_ptr<IWorkload> IWorkloadFactory::CreateSlice(const SliceQueueDescriptor& /*descriptor*/,
1395 const WorkloadInfo& /*info*/) const
1396{
1397 return std::unique_ptr<IWorkload>();
1398}
Mike Kellyc9ea45a2020-02-28 18:11:58 +00001399
Derek Lamberti901ea112019-12-10 22:07:09 +00001400std::unique_ptr<IWorkload> IWorkloadFactory::CreateSoftmax(const SoftmaxQueueDescriptor& /*descriptor*/,
1401 const WorkloadInfo& /*info*/) const
Aron Virginas-Tar636ab402019-09-16 14:27:45 +01001402{
1403 return std::unique_ptr<IWorkload>();
1404}
1405
Derek Lamberti901ea112019-12-10 22:07:09 +00001406std::unique_ptr<IWorkload> IWorkloadFactory::CreateSplitter(const SplitterQueueDescriptor& /*descriptor*/,
1407 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001408{
1409 return std::unique_ptr<IWorkload>();
1410}
1411
Derek Lamberti901ea112019-12-10 22:07:09 +00001412std::unique_ptr<IWorkload> IWorkloadFactory::CreateSpaceToBatchNd(const SpaceToBatchNdQueueDescriptor& /*descriptor*/,
1413 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001414{
1415 return std::unique_ptr<IWorkload>();
1416}
1417
Derek Lamberti901ea112019-12-10 22:07:09 +00001418std::unique_ptr<IWorkload> IWorkloadFactory::CreateSpaceToDepth(const SpaceToDepthQueueDescriptor& /*descriptor*/,
1419 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001420{
1421 return std::unique_ptr<IWorkload>();
1422}
1423
Derek Lamberti901ea112019-12-10 22:07:09 +00001424std::unique_ptr<IWorkload> IWorkloadFactory::CreateStack(const StackQueueDescriptor& /*descriptor*/,
1425 const WorkloadInfo& /*info*/) const
Aron Virginas-Tar972af152019-06-11 14:14:03 +01001426{
1427 return std::unique_ptr<IWorkload>();
1428}
1429
Derek Lamberti901ea112019-12-10 22:07:09 +00001430std::unique_ptr<IWorkload> IWorkloadFactory::CreateStridedSlice(const StridedSliceQueueDescriptor& /*descriptor*/,
1431 const WorkloadInfo& /*info*/) const
Matthew Jackson2b8c1da2019-07-04 14:59:16 +01001432{
1433 return std::unique_ptr<IWorkload>();
1434}
1435
Derek Lamberti901ea112019-12-10 22:07:09 +00001436std::unique_ptr<IWorkload> IWorkloadFactory::CreateSubtraction(const SubtractionQueueDescriptor& /*descriptor*/,
1437 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001438{
1439 return std::unique_ptr<IWorkload>();
1440}
1441
Derek Lamberti901ea112019-12-10 22:07:09 +00001442std::unique_ptr<IWorkload> IWorkloadFactory::CreateSwitch(const SwitchQueueDescriptor& /*descriptor*/,
1443 const WorkloadInfo& /*info*/) const
Sadik Armaganeff363d2019-04-05 15:25:46 +01001444{
1445 return std::unique_ptr<IWorkload>();
1446}
1447
Mike Kellyc9ea45a2020-02-28 18:11:58 +00001448std::unique_ptr<IWorkload> IWorkloadFactory::CreateTranspose(const TransposeQueueDescriptor& /*descriptor*/,
1449 const WorkloadInfo& /*info*/) const
1450{
1451 return std::unique_ptr<IWorkload>();
1452}
1453
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01001454std::unique_ptr<IWorkload> IWorkloadFactory::CreateTransposeConvolution2d(
Derek Lamberti901ea112019-12-10 22:07:09 +00001455 const TransposeConvolution2dQueueDescriptor& /*descriptor*/,
1456 const WorkloadInfo& /*info*/) const
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01001457{
1458 return std::unique_ptr<IWorkload>();
surmeh013537c2c2018-05-18 16:31:43 +01001459}
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01001460
} // namespace armnn