blob: 54a4157fe3d8acca518beff35617c6d58ce1f302 [file] [log] [blame]
Laurent Carlier749294b2020-06-01 09:03:17 +01001//
Teresa Charlin52664732020-06-29 16:27:03 +01002// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
David Beckecb56cd2018-09-05 12:52:57 +01003// SPDX-License-Identifier: MIT
telsoa014fcda012018-03-09 14:13:49 +00004//
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +00005
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +00006#include <Layer.hpp>
7#include <LayersFwd.hpp>
David Beckdcb751f2018-10-03 11:42:42 +01008
David Beckb4540be2018-09-24 13:18:27 +01009#include <armnn/Types.hpp>
10#include <armnn/LayerSupport.hpp>
David Beck111b5d92018-11-12 14:59:37 +000011#include <armnn/ILayerSupport.hpp>
Matteo Martincighc601aa62019-10-29 15:03:22 +000012#include <armnn/BackendRegistry.hpp>
Jan Eilersbb446e52020-04-02 13:56:54 +010013#include <armnn/utility/PolymorphicDowncast.hpp>
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +000014
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +000015#include <backendsCommon/WorkloadFactory.hpp>
Matteo Martincighe5b8eb92019-11-28 15:45:42 +000016#include <backendsCommon/CpuTensorHandle.hpp>
Matteo Martincighe5b8eb92019-11-28 15:45:42 +000017
Francis Murtagh46c09d02019-05-28 08:15:28 +010018#include <backendsCommon/test/WorkloadTestUtils.hpp>
telsoa014fcda012018-03-09 14:13:49 +000019
telsoa014fcda012018-03-09 14:13:49 +000020#include <boost/iterator/transform_iterator.hpp>
21
David Beck111b5d92018-11-12 14:59:37 +000022#include <sstream>
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +000023
telsoa014fcda012018-03-09 14:13:49 +000024namespace armnn
25{
26
telsoa01c577f2c2018-08-31 09:22:23 +010027namespace
28{
telsoa01c577f2c2018-08-31 09:22:23 +010029
David Beck29c75de2018-10-23 13:35:58 +010030const TensorInfo OverrideDataType(const TensorInfo& info, Optional<DataType> type)
31{
32 if (!type)
33 {
34 return info;
telsoa01c577f2c2018-08-31 09:22:23 +010035 }
36
David Beck29c75de2018-10-23 13:35:58 +010037 return TensorInfo(info.GetShape(), type.value(), info.GetQuantizationScale(), info.GetQuantizationOffset());
telsoa01c577f2c2018-08-31 09:22:23 +010038}
39
David Beck29c75de2018-10-23 13:35:58 +010040} // anonymous namespace
41
Sadik Armagan045f6be2020-09-10 13:37:32 +010042bool IWorkloadFactory::IsLayerConfigurationSupported(const BackendId& backendId,
43 const IConnectableLayer& connectableLayer,
44 Optional<DataType> dataType,
45 std::string& outReasonIfUnsupported,
46 const ModelOptions& modelOptions)
telsoa014fcda012018-03-09 14:13:49 +000047{
David Beck33f0ae02018-10-18 15:13:56 +010048 Optional<std::string&> reason = outReasonIfUnsupported;
telsoa014fcda012018-03-09 14:13:49 +000049 bool result;
Jan Eilersbb446e52020-04-02 13:56:54 +010050 const Layer& layer = *(PolymorphicDowncast<const Layer*>(&connectableLayer));
David Beckdcb751f2018-10-03 11:42:42 +010051
David Beck111b5d92018-11-12 14:59:37 +000052 auto const& backendRegistry = BackendRegistryInstance();
53 if (!backendRegistry.IsBackendRegistered(backendId))
54 {
55 std::stringstream ss;
56 ss << connectableLayer.GetName() << " is not supported on " << backendId
57 << " because this backend is not registered.";
58
59 outReasonIfUnsupported = ss.str();
60 return false;
61 }
62
63 auto backendFactory = backendRegistry.GetFactory(backendId);
64 auto backendObject = backendFactory();
Sadik Armagan045f6be2020-09-10 13:37:32 +010065 auto layerSupportObject = backendObject->GetLayerSupport(modelOptions);
David Beck33f0ae02018-10-18 15:13:56 +010066
telsoa014fcda012018-03-09 14:13:49 +000067 switch(layer.GetType())
68 {
69 case LayerType::Activation:
70 {
Jan Eilersbb446e52020-04-02 13:56:54 +010071 auto cLayer = PolymorphicDowncast<const ActivationLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +000072 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +010073 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +010074 result = layerSupportObject->IsActivationSupported(
telsoa01c577f2c2018-08-31 09:22:23 +010075 OverrideDataType(input, dataType),
76 OverrideDataType(output, dataType),
77 cLayer->GetParameters(),
David Beck33f0ae02018-10-18 15:13:56 +010078 reason);
telsoa014fcda012018-03-09 14:13:49 +000079 break;
80 }
81 case LayerType::Addition:
82 {
83 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
84 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
85 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +010086 result = layerSupportObject->IsAdditionSupported(
telsoa01c577f2c2018-08-31 09:22:23 +010087 OverrideDataType(input0, dataType),
88 OverrideDataType(input1, dataType),
89 OverrideDataType(output, dataType),
David Beck33f0ae02018-10-18 15:13:56 +010090 reason);
telsoa014fcda012018-03-09 14:13:49 +000091 break;
92 }
Nikhil Rajee391d52019-09-05 17:50:44 +010093 case LayerType::ArgMinMax:
94 {
Jan Eilersbb446e52020-04-02 13:56:54 +010095 auto cLayer = PolymorphicDowncast<const ArgMinMaxLayer*>(&layer);
Nikhil Rajee391d52019-09-05 17:50:44 +010096 const ArgMinMaxDescriptor& descriptor = cLayer->GetParameters();
97
98 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
99 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
100 result = layerSupportObject->IsArgMinMaxSupported(
101 OverrideDataType(input, dataType),
Narumol Prangnawaratd1f57732019-10-31 14:24:02 +0000102 OverrideDataType(output, DataType::Signed32),
Nikhil Rajee391d52019-09-05 17:50:44 +0100103 descriptor,
104 reason);
105 break;
106 }
telsoa014fcda012018-03-09 14:13:49 +0000107 case LayerType::BatchNormalization:
108 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100109 auto cLayer = PolymorphicDowncast<const BatchNormalizationLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000110 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100111 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
112 const TensorInfo& mean = cLayer->m_Mean->GetTensorInfo();
113 const TensorInfo& var = cLayer->m_Variance->GetTensorInfo();
114 const TensorInfo& beta = cLayer->m_Beta->GetTensorInfo();
115 const TensorInfo& gamma = cLayer->m_Gamma->GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100116 result = layerSupportObject->IsBatchNormalizationSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100117 OverrideDataType(input, dataType),
118 OverrideDataType(output, dataType),
119 OverrideDataType(mean, dataType),
120 OverrideDataType(var, dataType),
121 OverrideDataType(beta, dataType),
122 OverrideDataType(gamma, dataType),
123 cLayer->GetParameters(),
David Beck33f0ae02018-10-18 15:13:56 +0100124 reason);
telsoa014fcda012018-03-09 14:13:49 +0000125 break;
126 }
Éanna Ó Catháin4e1e1362018-11-12 11:36:34 +0000127 case LayerType::BatchToSpaceNd:
128 {
129 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
130 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Jan Eilersbb446e52020-04-02 13:56:54 +0100131 auto cLayer = PolymorphicDowncast<const BatchToSpaceNdLayer*>(&layer);
Éanna Ó Catháin4e1e1362018-11-12 11:36:34 +0000132
133 result = layerSupportObject->IsBatchToSpaceNdSupported(OverrideDataType(input, dataType),
134 OverrideDataType(output, dataType),
135 cLayer->GetParameters(),
136 reason);
137 break;
138 }
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +0100139 case LayerType::Comparison:
140 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100141 auto cLayer = PolymorphicDowncast<const ComparisonLayer*>(&layer);
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +0100142
143 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
144 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
145 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
146
147 result = layerSupportObject->IsComparisonSupported(OverrideDataType(input0, dataType),
148 OverrideDataType(input1, dataType),
149 OverrideDataType(output, DataType::Boolean),
150 cLayer->GetParameters(),
151 reason);
152 break;
153 }
telsoa014fcda012018-03-09 14:13:49 +0000154 case LayerType::Constant:
155 {
156 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100157 result = layerSupportObject->IsConstantSupported(OverrideDataType(output, dataType), reason);
telsoa01c577f2c2018-08-31 09:22:23 +0100158 break;
159 }
Narumol Prangnawarat7ddbbae2020-03-13 10:26:05 +0000160 case LayerType::ConvertBf16ToFp32:
161 {
162 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
163 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
164 result = layerSupportObject->IsConvertBf16ToFp32Supported(input, output, reason);
165 break;
166 }
telsoa01c577f2c2018-08-31 09:22:23 +0100167 case LayerType::ConvertFp16ToFp32:
168 {
169 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
170 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100171 result = layerSupportObject->IsConvertFp16ToFp32Supported(input, output, reason);
telsoa01c577f2c2018-08-31 09:22:23 +0100172 break;
173 }
Narumol Prangnawaratea54a012020-03-16 16:36:10 +0000174 case LayerType::ConvertFp32ToBf16:
175 {
176 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
177 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
178 result = layerSupportObject->IsConvertFp32ToBf16Supported(input, output, reason);
179 break;
180 }
telsoa01c577f2c2018-08-31 09:22:23 +0100181 case LayerType::ConvertFp32ToFp16:
182 {
183 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
184 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100185 result = layerSupportObject->IsConvertFp32ToFp16Supported(input, output, reason);
telsoa014fcda012018-03-09 14:13:49 +0000186 break;
187 }
188 case LayerType::Convolution2d:
189 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100190 auto cLayer = PolymorphicDowncast<const Convolution2dLayer*>(&layer);
arovir01a6824102018-08-28 17:40:45 +0100191
192 const TensorInfo input = OverrideDataType(layer.GetInputSlot(0).GetConnection()->GetTensorInfo(),
193 dataType);
telsoa01c577f2c2018-08-31 09:22:23 +0100194 const TensorInfo output = OverrideDataType(layer.GetOutputSlot(0).GetTensorInfo(), dataType);
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +0100195 ARMNN_ASSERT(cLayer->m_Weight.get() != nullptr);
surmeh013537c2c2018-05-18 16:31:43 +0100196
arovir01a6824102018-08-28 17:40:45 +0100197 const Convolution2dDescriptor& descriptor = cLayer->GetParameters();
surmeh013537c2c2018-05-18 16:31:43 +0100198
arovir01a6824102018-08-28 17:40:45 +0100199 // Construct optional biases object based on the value of m_BiasEnabled
David Beck5eec11d2018-10-04 15:43:17 +0100200 Optional<TensorInfo> biases;
surmeh013537c2c2018-05-18 16:31:43 +0100201 if (descriptor.m_BiasEnabled)
202 {
David Beck5eec11d2018-10-04 15:43:17 +0100203 biases =
204 OverrideDataType(cLayer->m_Bias->GetTensorInfo(), GetBiasTypeFromWeightsType(dataType));
surmeh013537c2c2018-05-18 16:31:43 +0100205 }
206
David Beck33f0ae02018-10-18 15:13:56 +0100207 result = layerSupportObject->IsConvolution2dSupported(
surmeh013537c2c2018-05-18 16:31:43 +0100208 input,
209 output,
210 descriptor,
telsoa01c577f2c2018-08-31 09:22:23 +0100211 OverrideDataType(cLayer->m_Weight->GetTensorInfo(), dataType),
arovir01a6824102018-08-28 17:40:45 +0100212 biases,
David Beck33f0ae02018-10-18 15:13:56 +0100213 reason);
telsoa014fcda012018-03-09 14:13:49 +0000214 break;
215 }
Nattapat Chaimanowonga9a1cf12018-12-03 16:06:49 +0000216 case LayerType::Debug:
217 {
218 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
219 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
220
221 result = layerSupportObject->IsDebugSupported(OverrideDataType(input, dataType),
222 OverrideDataType(output, dataType),
223 reason);
224 break;
225 }
Aron Virginas-Tardd6247f2019-09-19 14:31:17 +0100226 case LayerType::DepthToSpace:
227 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100228 auto cLayer = PolymorphicDowncast<const DepthToSpaceLayer*>(&layer);
Aron Virginas-Tardd6247f2019-09-19 14:31:17 +0100229
230 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
231 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
232
233 result = layerSupportObject->IsDepthToSpaceSupported(OverrideDataType(input, dataType),
234 OverrideDataType(output, dataType),
235 cLayer->GetParameters(),
236 reason);
237 break;
238 }
telsoa014fcda012018-03-09 14:13:49 +0000239 case LayerType::DepthwiseConvolution2d:
240 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100241 auto cLayer = PolymorphicDowncast<const DepthwiseConvolution2dLayer*>(&layer);
telsoa01c577f2c2018-08-31 09:22:23 +0100242 const TensorInfo& input = OverrideDataType(layer.GetInputSlot(0).GetConnection()->GetTensorInfo(),
243 dataType);
244 const TensorInfo& output = OverrideDataType(layer.GetOutputSlot(0).GetTensorInfo(), dataType);
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +0100245 ARMNN_ASSERT(cLayer->m_Weight.get() != nullptr);
telsoa01c577f2c2018-08-31 09:22:23 +0100246
telsoa01c577f2c2018-08-31 09:22:23 +0100247 const DepthwiseConvolution2dDescriptor& descriptor = cLayer->GetParameters();
arovir01a6824102018-08-28 17:40:45 +0100248
249 // Construct optional biases object based on the value of m_BiasEnabled
David Beck5eec11d2018-10-04 15:43:17 +0100250 Optional<TensorInfo> biases;
telsoa01c577f2c2018-08-31 09:22:23 +0100251 if (descriptor.m_BiasEnabled)
252 {
David Beck5eec11d2018-10-04 15:43:17 +0100253 biases =
254 OverrideDataType(cLayer->m_Bias->GetTensorInfo(), GetBiasTypeFromWeightsType(dataType));
telsoa01c577f2c2018-08-31 09:22:23 +0100255 }
telsoa01c577f2c2018-08-31 09:22:23 +0100256
David Beck33f0ae02018-10-18 15:13:56 +0100257 result = layerSupportObject->IsDepthwiseConvolutionSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100258 input,
259 output,
260 descriptor,
261 OverrideDataType(cLayer->m_Weight->GetTensorInfo(), dataType),
arovir01a6824102018-08-28 17:40:45 +0100262 biases,
David Beck33f0ae02018-10-18 15:13:56 +0100263 reason);
telsoa014fcda012018-03-09 14:13:49 +0000264 break;
265 }
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +0000266 case LayerType::Dequantize:
267 {
268 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
269 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
270
Aron Virginas-Tar87972be2019-11-13 15:16:28 +0000271 result = layerSupportObject->IsDequantizeSupported(input,
272 OverrideDataType(output, dataType),
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +0000273 reason);
274 break;
275 }
Narumol Prangnawarat94dd5d82019-01-23 18:06:26 +0000276 case LayerType::DetectionPostProcess:
277 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100278 auto cLayer = PolymorphicDowncast<const DetectionPostProcessLayer*>(&layer);
Derek Lamberti6a5e5e82019-12-05 14:41:20 +0000279 const TensorInfo& boxEncodings = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
280 const TensorInfo& scores = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
281 const TensorInfo& anchors = cLayer->m_Anchors->GetTensorInfo();
282
283 const TensorInfo& detectionBoxes = layer.GetOutputSlot(0).GetTensorInfo();
284 const TensorInfo& detectionClasses = layer.GetOutputSlot(1).GetTensorInfo();
285 const TensorInfo& detectionScores = layer.GetOutputSlot(2).GetTensorInfo();
286 const TensorInfo& numDetections = layer.GetOutputSlot(3).GetTensorInfo();
287
Narumol Prangnawarat94dd5d82019-01-23 18:06:26 +0000288 const DetectionPostProcessDescriptor& descriptor = cLayer->GetParameters();
Derek Lamberti6a5e5e82019-12-05 14:41:20 +0000289 result = layerSupportObject->IsDetectionPostProcessSupported(boxEncodings,
290 scores,
291 anchors,
292 detectionBoxes,
293 detectionClasses,
294 detectionScores,
295 numDetections,
Narumol Prangnawarat94dd5d82019-01-23 18:06:26 +0000296 descriptor,
297 reason);
298 break;
299 }
josh minor4a3c6102020-01-06 16:40:46 -0600300 case LayerType::ElementwiseUnary:
301 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100302 auto cLayer = PolymorphicDowncast<const ElementwiseUnaryLayer*>(&layer);
josh minor4a3c6102020-01-06 16:40:46 -0600303
304 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
305 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
306
307 result = layerSupportObject->IsElementwiseUnarySupported(OverrideDataType(input, dataType),
308 OverrideDataType(output, dataType),
309 cLayer->GetParameters(),
310 reason);
311 break;
312 }
Ryan OSheaec6c6802020-06-05 17:17:06 +0100313 case LayerType::Fill:
314 {
315 auto cLayer = PolymorphicDowncast<const FillLayer*>(&layer);
316 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
317 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
318 const FillDescriptor& descriptor = cLayer->GetParameters();
319
320 result = layerSupportObject->IsFillSupported(
321 OverrideDataType(input, dataType),
322 OverrideDataType(output, dataType),
323 descriptor,
324 reason);
325 break;
326 }
telsoa014fcda012018-03-09 14:13:49 +0000327 case LayerType::FakeQuantization:
328 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100329 auto cLayer = PolymorphicDowncast<const FakeQuantizationLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000330 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100331 result = layerSupportObject->IsFakeQuantizationSupported(OverrideDataType(input, dataType),
332 cLayer->GetParameters(),
333 reason);
telsoa014fcda012018-03-09 14:13:49 +0000334 break;
335 }
336 case LayerType::Floor:
337 {
338 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
339 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100340 result = layerSupportObject->IsFloorSupported(OverrideDataType(input, dataType),
341 OverrideDataType(output, dataType),
342 reason);
telsoa014fcda012018-03-09 14:13:49 +0000343 break;
344 }
345 case LayerType::FullyConnected:
346 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100347 auto cLayer = PolymorphicDowncast<const FullyConnectedLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000348 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100349 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +0100350 ARMNN_ASSERT(cLayer->m_Weight.get() != nullptr);
telsoa01c577f2c2018-08-31 09:22:23 +0100351
352 TensorInfo biasInfo;
353 const TensorInfo * biasInfoPtr = nullptr;
Narumol Prangnawarat44179c32020-03-11 14:51:27 +0000354 static const TensorInfo dummyBFloat16Bias(TensorShape({1,1,1,1}), DataType::BFloat16);
telsoa01c577f2c2018-08-31 09:22:23 +0100355 static const TensorInfo dummyFloat16Bias(TensorShape({1,1,1,1}), DataType::Float16);
356 static const TensorInfo dummyFloat32Bias(TensorShape({1,1,1,1}), DataType::Float32);
357 static const TensorInfo dummyQA8Bias(TensorShape({1,1,1,1}), DataType::Signed32);
358
359 const FullyConnectedDescriptor& descriptor = cLayer->GetParameters();
360 if (descriptor.m_BiasEnabled)
361 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +0100362 ARMNN_ASSERT(cLayer->m_Bias.get() != nullptr);
telsoa01c577f2c2018-08-31 09:22:23 +0100363 biasInfo = OverrideDataType(cLayer->m_Bias->GetTensorInfo(), GetBiasTypeFromWeightsType(dataType));
364 biasInfoPtr = &biasInfo;
365 }
366 else
367 {
368 // If biases are not enabled pass a dummy tensorinfo for the validation
369 switch(input.GetDataType())
370 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +0000371 case DataType::BFloat16:
372 {
373 biasInfoPtr = &dummyBFloat16Bias;
374 break;
375 }
telsoa01c577f2c2018-08-31 09:22:23 +0100376 case DataType::Float16:
377 {
378 biasInfoPtr = &dummyFloat16Bias;
379 break;
380 }
381 case DataType::Float32:
382 {
383 biasInfoPtr = &dummyFloat32Bias;
384 break;
385 }
Derek Lambertif90c56d2020-01-10 17:14:08 +0000386 case DataType::QAsymmU8:
Keith Davisa8565012020-02-14 12:22:40 +0000387 case DataType::QAsymmS8:
Keith Davis9d0ff742020-02-03 14:47:54 +0000388 case DataType::QSymmS8:
Derek Lambertif90c56d2020-01-10 17:14:08 +0000389 case DataType::QSymmS16:
telsoa01c577f2c2018-08-31 09:22:23 +0100390 {
391 biasInfoPtr = &dummyQA8Bias;
392 break;
393 }
394 default:
395 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +0100396 ARMNN_ASSERT_MSG(false, "Unexpected bias type");
telsoa01c577f2c2018-08-31 09:22:23 +0100397 }
398 }
399 }
400
David Beck33f0ae02018-10-18 15:13:56 +0100401 result = layerSupportObject->IsFullyConnectedSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100402 OverrideDataType(input, dataType),
403 OverrideDataType(output, dataType),
404 OverrideDataType(cLayer->m_Weight->GetTensorInfo(), dataType),
405 *biasInfoPtr,
406 descriptor,
David Beck33f0ae02018-10-18 15:13:56 +0100407 reason);
telsoa014fcda012018-03-09 14:13:49 +0000408 break;
409 }
narpra01b89b05f2019-01-16 09:53:09 +0000410 case LayerType::Gather:
411 {
412 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
413 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
414 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Teresa Charlin52664732020-06-29 16:27:03 +0100415 auto cLayer = PolymorphicDowncast<const GatherLayer*>(&layer);
416 const GatherDescriptor& descriptor = cLayer->GetParameters();
narpra01b89b05f2019-01-16 09:53:09 +0000417 result = layerSupportObject->IsGatherSupported(OverrideDataType(input0, dataType),
Ellen Norris-Thompsone0dbedf2019-06-24 09:23:38 +0100418 input1,
narpra01b89b05f2019-01-16 09:53:09 +0000419 OverrideDataType(output, dataType),
Teresa Charlin52664732020-06-29 16:27:03 +0100420 descriptor,
narpra01b89b05f2019-01-16 09:53:09 +0000421 reason);
422 break;
423 }
telsoa014fcda012018-03-09 14:13:49 +0000424 case LayerType::Input:
425 {
426 const TensorInfo& input = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100427 result = layerSupportObject->IsInputSupported(OverrideDataType(input, dataType), reason);
telsoa014fcda012018-03-09 14:13:49 +0000428 break;
429 }
Kevin Mayce5045a2019-10-02 14:07:47 +0100430 case LayerType::InstanceNormalization:
431 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100432 auto cLayer = PolymorphicDowncast<const InstanceNormalizationLayer*>(&layer);
Kevin Mayce5045a2019-10-02 14:07:47 +0100433 const InstanceNormalizationDescriptor& descriptor = cLayer->GetParameters();
434
435 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
436 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
437
438 result = layerSupportObject->IsInstanceNormalizationSupported(
439 OverrideDataType(input, dataType),
440 OverrideDataType(output, dataType),
441 descriptor,
442 reason);
443 break;
444 }
telsoa014fcda012018-03-09 14:13:49 +0000445 case LayerType::L2Normalization:
446 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100447 auto cLayer = PolymorphicDowncast<const L2NormalizationLayer*>(&layer);
Matteo Martincighbcd3c852018-09-28 14:14:12 +0100448 const L2NormalizationDescriptor& descriptor = cLayer->GetParameters();
449
telsoa014fcda012018-03-09 14:13:49 +0000450 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100451 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Matteo Martincighbcd3c852018-09-28 14:14:12 +0100452
David Beck33f0ae02018-10-18 15:13:56 +0100453 result = layerSupportObject->IsL2NormalizationSupported(
Matteo Martincighbcd3c852018-09-28 14:14:12 +0100454 OverrideDataType(input, dataType),
455 OverrideDataType(output, dataType),
456 descriptor,
David Beck33f0ae02018-10-18 15:13:56 +0100457 reason);
telsoa01c577f2c2018-08-31 09:22:23 +0100458 break;
459 }
Aron Virginas-Tarf982dea2019-10-11 14:07:53 +0100460 case LayerType::LogSoftmax:
461 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100462 auto cLayer = PolymorphicDowncast<const LogSoftmaxLayer*>(&layer);
Aron Virginas-Tarf982dea2019-10-11 14:07:53 +0100463
464 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
465 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
466
467 result = layerSupportObject->IsLogSoftmaxSupported(OverrideDataType(input, dataType),
468 OverrideDataType(output, dataType),
469 cLayer->GetParameters(),
470 reason);
471 break;
472 }
telsoa01c577f2c2018-08-31 09:22:23 +0100473 case LayerType::Lstm:
474 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100475 auto cLayer = PolymorphicDowncast<const LstmLayer*>(&layer);
telsoa01c577f2c2018-08-31 09:22:23 +0100476 const LstmDescriptor& descriptor = cLayer->GetParameters();
477
478 // All inputs.
479 const TensorInfo& input = OverrideDataType(layer.GetInputSlot(0).GetConnection()->GetTensorInfo(),
480 dataType);
481 const TensorInfo& outputStateIn = OverrideDataType(layer.GetInputSlot(1).GetConnection()->GetTensorInfo(),
482 dataType);
483 const TensorInfo& cellStateIn = OverrideDataType(layer.GetInputSlot(2).GetConnection()->GetTensorInfo(),
484 dataType);
485 // All outputs
486 const TensorInfo& scratchBuffer = OverrideDataType(layer.GetOutputSlot(0).GetTensorInfo(), dataType);
487 const TensorInfo& outputStateOut = OverrideDataType(layer.GetOutputSlot(1).GetTensorInfo(), dataType);
488 const TensorInfo& cellStateOut = OverrideDataType(layer.GetOutputSlot(2).GetTensorInfo(), dataType);
489 const TensorInfo& output = OverrideDataType(layer.GetOutputSlot(3).GetTensorInfo(), dataType);
490
491 // Basic parameters
492 const TensorInfo& inputToForgetWeights
493 = OverrideDataType(cLayer->m_BasicParameters.m_InputToForgetWeights->GetTensorInfo(), dataType);
494 const TensorInfo& inputToCellWeights
495 = OverrideDataType(cLayer->m_BasicParameters.m_InputToCellWeights->GetTensorInfo(), dataType);
496 const TensorInfo& inputToOutputWeights
497 = OverrideDataType(cLayer->m_BasicParameters.m_InputToOutputWeights->GetTensorInfo(), dataType);
498 const TensorInfo& recurrentToForgetWeights
499 = OverrideDataType(cLayer->m_BasicParameters.m_RecurrentToForgetWeights->GetTensorInfo(), dataType);
500 const TensorInfo& recurrentToCellWeights
501 = OverrideDataType(cLayer->m_BasicParameters.m_RecurrentToCellWeights->GetTensorInfo(), dataType);
502 const TensorInfo& recurrentToOutputWeights
503 = OverrideDataType(cLayer->m_BasicParameters.m_RecurrentToOutputWeights->GetTensorInfo(), dataType);
504 const TensorInfo& forgetGateBias
505 = OverrideDataType(cLayer->m_BasicParameters.m_ForgetGateBias->GetTensorInfo(), dataType);
506 const TensorInfo& cellBias
507 = OverrideDataType(cLayer->m_BasicParameters.m_CellBias->GetTensorInfo(), dataType);
508 const TensorInfo& outputGateBias
509 = OverrideDataType(cLayer->m_BasicParameters.m_OutputGateBias->GetTensorInfo(), dataType);
510
Jan Eilersd01a83c2019-07-03 18:20:40 +0100511 LstmInputParamsInfo paramsInfo;
telsoa01c577f2c2018-08-31 09:22:23 +0100512
Jan Eilersd01a83c2019-07-03 18:20:40 +0100513 paramsInfo.m_InputToForgetWeights = &inputToForgetWeights;
514 paramsInfo.m_InputToCellWeights = &inputToCellWeights;
515 paramsInfo.m_InputToOutputWeights = &inputToOutputWeights;
516 paramsInfo.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
517 paramsInfo.m_RecurrentToCellWeights = &recurrentToCellWeights;
518 paramsInfo.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
519 paramsInfo.m_ForgetGateBias = &forgetGateBias;
520 paramsInfo.m_CellBias = &cellBias;
521 paramsInfo.m_OutputGateBias = &outputGateBias;
522
523
524 // Optional parameters
telsoa01c577f2c2018-08-31 09:22:23 +0100525 TensorInfo optInputToInputWeights;
526 TensorInfo optRecurrentToInputWeights;
527 TensorInfo optCellToInputWeights;
528 TensorInfo optInputGateBias;
529 TensorInfo optProjectionWeights;
530 TensorInfo optProjectionBias;
531 TensorInfo optCellToForgetWeights;
532 TensorInfo optCellToOutputWeights;
Jan Eilers38e05bd2019-06-26 13:10:09 +0100533 TensorInfo optInputLayerNormWeights;
534 TensorInfo optForgetLayerNormWeights;
535 TensorInfo optCellLayerNormWeights;
536 TensorInfo optOutputLayerNormWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100537
538 if(!descriptor.m_CifgEnabled)
539 {
540 optInputToInputWeights =
541 OverrideDataType(cLayer->m_CifgParameters.m_InputToInputWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100542 paramsInfo.m_InputToInputWeights = &optInputToInputWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100543
544 optRecurrentToInputWeights =
545 OverrideDataType(cLayer->m_CifgParameters.m_RecurrentToInputWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100546 paramsInfo.m_RecurrentToInputWeights = &optRecurrentToInputWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100547 optInputGateBias =
548 OverrideDataType(cLayer->m_CifgParameters.m_InputGateBias->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100549 paramsInfo.m_InputGateBias = &optInputGateBias;
telsoa01c577f2c2018-08-31 09:22:23 +0100550 }
551
552 if(descriptor.m_ProjectionEnabled)
553 {
554 optProjectionWeights =
555 OverrideDataType(cLayer->m_ProjectionParameters.m_ProjectionWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100556 paramsInfo.m_ProjectionWeights = &optProjectionWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100557 if (cLayer->m_ProjectionParameters.m_ProjectionBias != nullptr)
558 {
559 optProjectionBias =
560 OverrideDataType(cLayer->m_ProjectionParameters.m_ProjectionBias->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100561 paramsInfo.m_ProjectionBias = &optProjectionBias;
telsoa01c577f2c2018-08-31 09:22:23 +0100562 }
563 }
564
565 if(descriptor.m_PeepholeEnabled)
566 {
Jan Eilerse2062cd2020-03-30 15:07:45 +0100567 if(!descriptor.m_CifgEnabled)
568 {
569 optCellToInputWeights =
570 OverrideDataType(cLayer->m_PeepholeParameters.m_CellToInputWeights->GetTensorInfo(),
571 dataType);
572 paramsInfo.m_CellToInputWeights = &optCellToInputWeights;
573 }
telsoa01c577f2c2018-08-31 09:22:23 +0100574 optCellToForgetWeights =
575 OverrideDataType(cLayer->m_PeepholeParameters.m_CellToForgetWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100576 paramsInfo.m_CellToForgetWeights = &optCellToForgetWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100577 optCellToOutputWeights =
578 OverrideDataType(cLayer->m_PeepholeParameters.m_CellToOutputWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100579 paramsInfo.m_CellToOutputWeights = &optCellToOutputWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100580 }
581
Jan Eilers38e05bd2019-06-26 13:10:09 +0100582 if(descriptor.m_LayerNormEnabled)
583 {
Ferran Balaguere30c16e2019-07-24 17:03:45 +0100584 if (!descriptor.m_CifgEnabled)
585 {
586 optInputLayerNormWeights = OverrideDataType(
587 cLayer->m_LayerNormParameters.m_InputLayerNormWeights->GetTensorInfo(), dataType);
588 paramsInfo.m_InputLayerNormWeights = &optInputLayerNormWeights;
589 }
Jan Eilers38e05bd2019-06-26 13:10:09 +0100590
591 optForgetLayerNormWeights = OverrideDataType(
592 cLayer->m_LayerNormParameters.m_ForgetLayerNormWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100593 paramsInfo.m_ForgetLayerNormWeights = &optForgetLayerNormWeights;
Jan Eilers38e05bd2019-06-26 13:10:09 +0100594
595 optCellLayerNormWeights = OverrideDataType(
596 cLayer->m_LayerNormParameters.m_CellLayerNormWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100597 paramsInfo.m_CellLayerNormWeights = &optCellLayerNormWeights;
Jan Eilers38e05bd2019-06-26 13:10:09 +0100598
599 optOutputLayerNormWeights = OverrideDataType(
600 cLayer->m_LayerNormParameters.m_OutputLayerNormWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100601 paramsInfo.m_OutputLayerNormWeights = &optOutputLayerNormWeights;
Jan Eilers38e05bd2019-06-26 13:10:09 +0100602 }
603
David Beck33f0ae02018-10-18 15:13:56 +0100604 result = layerSupportObject->IsLstmSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100605 input,
606 outputStateIn,
607 cellStateIn,
608 scratchBuffer,
609 outputStateOut,
610 cellStateOut,
611 output,
612 descriptor,
Jan Eilersd01a83c2019-07-03 18:20:40 +0100613 paramsInfo,
614 reason);
telsoa014fcda012018-03-09 14:13:49 +0000615 break;
616 }
Nattapat Chaimanowong5a4304a2018-11-28 10:44:37 +0000617 case LayerType::Maximum:
618 {
619 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
620 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
621 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
622
623 result = layerSupportObject->IsMaximumSupported(OverrideDataType(input0, dataType),
624 OverrideDataType(input1, dataType),
625 OverrideDataType(output, dataType),
626 reason);
627 break;
628 }
narpra01b89b05f2019-01-16 09:53:09 +0000629 case LayerType::MemCopy:
630 {
631 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
632 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Matteo Martincigh992d6dc2019-01-10 17:34:20 +0000633
narpra01b89b05f2019-01-16 09:53:09 +0000634 result = layerSupportObject->IsMemCopySupported(OverrideDataType(input, dataType),
635 OverrideDataType(output, dataType),
636 reason);
637 break;
638 }
Derek Lambertif674aa02019-08-01 15:56:25 +0100639 case LayerType::MemImport:
640 {
641 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
642 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
643
644 result = layerSupportObject->IsMemImportSupported(OverrideDataType(input, dataType),
645 OverrideDataType(output, dataType),
646 reason);
647 break;
648 }
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +0100649 case LayerType::Merge:
650 {
651 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
652 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
653 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
654
655 result = layerSupportObject->IsMergeSupported(OverrideDataType(input0, dataType),
656 OverrideDataType(input1, dataType),
657 OverrideDataType(output, dataType),
658 reason);
659 break;
660 }
Jim Flynne242f2d2019-05-22 14:24:13 +0100661 case LayerType::Concat:
telsoa014fcda012018-03-09 14:13:49 +0000662 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100663 auto cLayer = PolymorphicDowncast<const ConcatLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000664
telsoa01c577f2c2018-08-31 09:22:23 +0100665 // Get vector of all inputs.
666 auto getTensorInfo = [&dataType](const InputSlot& slot)
telsoa014fcda012018-03-09 14:13:49 +0000667 {
telsoa01c577f2c2018-08-31 09:22:23 +0100668 return OverrideDataType(slot.GetConnectedOutputSlot()->GetTensorInfo(), dataType);
telsoa014fcda012018-03-09 14:13:49 +0000669 };
telsoa01c577f2c2018-08-31 09:22:23 +0100670 auto beginI = boost::make_transform_iterator(layer.GetInputSlots().begin(), getTensorInfo);
671 auto endI = boost::make_transform_iterator(layer.GetInputSlots().end(), getTensorInfo);
672 std::vector<TensorInfo> inputs(beginI, endI);
telsoa014fcda012018-03-09 14:13:49 +0000673
telsoa01c577f2c2018-08-31 09:22:23 +0100674 auto getTensorInfoPtr = [](const TensorInfo& info)
675 {
676 return &info;
677 };
678 auto beginPtr = boost::make_transform_iterator(inputs.begin(), getTensorInfoPtr);
679 auto endPtr = boost::make_transform_iterator(inputs.end(), getTensorInfoPtr);
680 std::vector<const TensorInfo*> inputPtrs(beginPtr, endPtr);
telsoa014fcda012018-03-09 14:13:49 +0000681
Nikhil Raj8599a412018-11-19 14:51:07 +0000682 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
683
Jim Flynne242f2d2019-05-22 14:24:13 +0100684 result = layerSupportObject->IsConcatSupported(inputPtrs, output, cLayer->GetParameters(), reason);
685
686
telsoa014fcda012018-03-09 14:13:49 +0000687 break;
688 }
689 case LayerType::Multiplication:
690 {
691 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
692 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100693 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100694 result = layerSupportObject->IsMultiplicationSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100695 OverrideDataType(input0, dataType),
696 OverrideDataType(input1, dataType),
697 OverrideDataType(output, dataType),
David Beck33f0ae02018-10-18 15:13:56 +0100698 reason);
telsoa014fcda012018-03-09 14:13:49 +0000699 break;
700 }
701 case LayerType::Normalization:
702 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100703 auto cLayer = PolymorphicDowncast<const NormalizationLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000704 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
705 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100706 result = layerSupportObject->IsNormalizationSupported(OverrideDataType(input, dataType),
707 OverrideDataType(output, dataType),
708 cLayer->GetParameters(),
709 reason);
telsoa014fcda012018-03-09 14:13:49 +0000710 break;
711 }
712 case LayerType::Output:
713 {
714 const TensorInfo& output = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100715 result = layerSupportObject->IsOutputSupported(OverrideDataType(output, dataType), reason);
telsoa014fcda012018-03-09 14:13:49 +0000716 break;
717 }
718 case LayerType::Permute:
719 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100720 auto cLayer = PolymorphicDowncast<const PermuteLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000721 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
722 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100723 result = layerSupportObject->IsPermuteSupported(OverrideDataType(input, dataType),
724 OverrideDataType(output, dataType),
725 cLayer->GetParameters(),
726 reason);
telsoa014fcda012018-03-09 14:13:49 +0000727 break;
728 }
Mohamed Nour Abouelseoud5662c202018-09-24 13:30:09 +0100729 case LayerType::Pad:
730 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100731 auto cLayer = PolymorphicDowncast<const PadLayer*>(&layer);
Mohamed Nour Abouelseoud5662c202018-09-24 13:30:09 +0100732 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
733 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100734 result = layerSupportObject->IsPadSupported(
Mohamed Nour Abouelseoud5662c202018-09-24 13:30:09 +0100735 OverrideDataType(input, dataType),
736 OverrideDataType(output, dataType),
737 cLayer->GetParameters(),
David Beck33f0ae02018-10-18 15:13:56 +0100738 reason);
Mohamed Nour Abouelseoud5662c202018-09-24 13:30:09 +0100739 break;
740 }
telsoa014fcda012018-03-09 14:13:49 +0000741 case LayerType::Pooling2d:
742 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100743 auto cLayer = PolymorphicDowncast<const Pooling2dLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000744 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
745 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100746 result = layerSupportObject->IsPooling2dSupported(OverrideDataType(input, dataType),
747 OverrideDataType(output, dataType),
748 cLayer->GetParameters(),
749 reason);
telsoa014fcda012018-03-09 14:13:49 +0000750 break;
751 }
Matteo Martincigh49124022019-01-11 13:25:59 +0000752 case LayerType::PreCompiled:
753 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100754 auto cLayer = PolymorphicDowncast<const PreCompiledLayer*>(&layer);
Matteo Martincigh49124022019-01-11 13:25:59 +0000755 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
756 result = layerSupportObject->IsPreCompiledSupported(OverrideDataType(input, dataType),
757 cLayer->GetParameters(),
758 reason);
759 break;
760 }
Derek Lambertia9cca6a2019-03-25 15:41:58 +0000761 case LayerType::Quantize:
762 {
763 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
764 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
765 result = layerSupportObject->IsQuantizeSupported(input, output, reason);
766 break;
767 }
James Conroy586a9aa2020-03-20 08:49:33 +0000768 case LayerType::QLstm:
769 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100770 auto cLayer = PolymorphicDowncast<const QLstmLayer*>(&layer);
James Conroy586a9aa2020-03-20 08:49:33 +0000771 const QLstmDescriptor& descriptor = cLayer->GetParameters();
772
773 // Inputs
774 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
775 const TensorInfo& previousOutputIn = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
776 const TensorInfo& previousCellStateIn = layer.GetInputSlot(2).GetConnection()->GetTensorInfo();
777
778 // Outputs
779 const TensorInfo& outputStateOut = layer.GetOutputSlot(0).GetTensorInfo();
780 const TensorInfo& cellStateOut = layer.GetOutputSlot(1).GetTensorInfo();
781 const TensorInfo& output = layer.GetOutputSlot(2).GetTensorInfo();
782
783 // Lstm parameters
784 LstmInputParamsInfo paramsInfo;
785
786 // Basic parameters
787 paramsInfo.m_InputToForgetWeights = &cLayer->m_BasicParameters.m_InputToForgetWeights->GetTensorInfo();
788 paramsInfo.m_InputToCellWeights = &cLayer->m_BasicParameters.m_InputToCellWeights->GetTensorInfo();
789 paramsInfo.m_InputToOutputWeights = &cLayer->m_BasicParameters.m_InputToOutputWeights->GetTensorInfo();
790
791 paramsInfo.m_RecurrentToForgetWeights =
792 &cLayer->m_BasicParameters.m_RecurrentToForgetWeights->GetTensorInfo();
793 paramsInfo.m_RecurrentToCellWeights =
794 &cLayer->m_BasicParameters.m_RecurrentToCellWeights->GetTensorInfo();
795 paramsInfo.m_RecurrentToOutputWeights =
796 &cLayer->m_BasicParameters.m_RecurrentToOutputWeights->GetTensorInfo();
797
798 paramsInfo.m_ForgetGateBias = &cLayer->m_BasicParameters.m_ForgetGateBias->GetTensorInfo();
799 paramsInfo.m_CellBias = &cLayer->m_BasicParameters.m_CellBias->GetTensorInfo();
800 paramsInfo.m_OutputGateBias = &cLayer->m_BasicParameters.m_OutputGateBias->GetTensorInfo();
801
802 if(!descriptor.m_CifgEnabled)
803 {
804 paramsInfo.m_InputToInputWeights = &cLayer->m_CifgParameters.m_InputToInputWeights->GetTensorInfo();
805 paramsInfo.m_RecurrentToInputWeights =
806 &cLayer->m_CifgParameters.m_RecurrentToInputWeights->GetTensorInfo();
807 paramsInfo.m_InputGateBias = &cLayer->m_CifgParameters.m_InputGateBias->GetTensorInfo();
808 }
809
810 if(descriptor.m_ProjectionEnabled)
811 {
812 paramsInfo.m_ProjectionWeights = &cLayer->m_ProjectionParameters.m_ProjectionWeights->GetTensorInfo();
James Conroyed324052020-05-18 15:16:42 +0100813
814 // Projection bias is optional even if projection is enabled
815 if (cLayer->m_ProjectionParameters.m_ProjectionBias != nullptr)
816 {
817 paramsInfo.m_ProjectionBias = &cLayer->m_ProjectionParameters.m_ProjectionBias->GetTensorInfo();
818 }
James Conroy586a9aa2020-03-20 08:49:33 +0000819 }
820
821 if(descriptor.m_PeepholeEnabled)
822 {
823 if (!descriptor.m_CifgEnabled)
824 {
825 paramsInfo.m_CellToInputWeights =
826 &cLayer->m_PeepholeParameters.m_CellToInputWeights->GetTensorInfo();
827 }
828
829 paramsInfo.m_CellToForgetWeights =
830 &cLayer->m_PeepholeParameters.m_CellToForgetWeights->GetTensorInfo();
831 paramsInfo.m_CellToOutputWeights = &cLayer->m_PeepholeParameters.m_CellToOutputWeights->GetTensorInfo();
832 }
833
834 if(descriptor.m_LayerNormEnabled)
835 {
836 if (!descriptor.m_CifgEnabled)
837 {
838 paramsInfo.m_InputLayerNormWeights =
839 &cLayer->m_LayerNormParameters.m_InputLayerNormWeights->GetTensorInfo();
840 }
841
842 paramsInfo.m_ForgetLayerNormWeights =
843 &cLayer->m_LayerNormParameters.m_ForgetLayerNormWeights->GetTensorInfo();
844 paramsInfo.m_CellLayerNormWeights =
845 &cLayer->m_LayerNormParameters.m_CellLayerNormWeights->GetTensorInfo();
846 paramsInfo.m_OutputLayerNormWeights =
847 &cLayer->m_LayerNormParameters.m_OutputLayerNormWeights->GetTensorInfo();
848 }
849
850 result = layerSupportObject->IsQLstmSupported(input,
851 previousOutputIn,
852 previousCellStateIn,
853 outputStateOut,
854 cellStateOut,
855 output,
856 descriptor,
857 paramsInfo,
858 reason);
859 break;
860 }
James Conroyee18dc82019-07-17 11:27:46 +0100861 case LayerType::QuantizedLstm:
862 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100863 auto cLayer = PolymorphicDowncast<const QuantizedLstmLayer*>(&layer);
James Conroyee18dc82019-07-17 11:27:46 +0100864
865 // Inputs
Ferran Balaguer737d9ff2019-08-01 09:58:08 +0100866 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
867 const TensorInfo& previousCellStateIn = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
868 const TensorInfo& previousOutputIn = layer.GetInputSlot(2).GetConnection()->GetTensorInfo();
James Conroyee18dc82019-07-17 11:27:46 +0100869
870 // Outputs
Ferran Balaguer737d9ff2019-08-01 09:58:08 +0100871 const TensorInfo& cellStateOut = layer.GetOutputSlot(0).GetTensorInfo();
872 const TensorInfo& output = layer.GetOutputSlot(1).GetTensorInfo();
James Conroyee18dc82019-07-17 11:27:46 +0100873
874 // QuantizedLstm parameters
James Conroyee18dc82019-07-17 11:27:46 +0100875 QuantizedLstmInputParamsInfo paramsInfo;
876
Ferran Balaguer737d9ff2019-08-01 09:58:08 +0100877 paramsInfo.m_InputToInputWeights =
878 &cLayer->m_QuantizedLstmParameters.m_InputToInputWeights->GetTensorInfo();
879 paramsInfo.m_InputToForgetWeights =
880 &cLayer->m_QuantizedLstmParameters.m_InputToForgetWeights->GetTensorInfo();
881 paramsInfo.m_InputToCellWeights =
882 &cLayer->m_QuantizedLstmParameters.m_InputToCellWeights->GetTensorInfo();
883 paramsInfo.m_InputToOutputWeights =
884 &cLayer->m_QuantizedLstmParameters.m_InputToOutputWeights->GetTensorInfo();
James Conroyee18dc82019-07-17 11:27:46 +0100885
Ferran Balaguer737d9ff2019-08-01 09:58:08 +0100886 paramsInfo.m_RecurrentToInputWeights =
887 &cLayer->m_QuantizedLstmParameters.m_RecurrentToInputWeights->GetTensorInfo();
888 paramsInfo.m_RecurrentToForgetWeights =
889 &cLayer->m_QuantizedLstmParameters.m_RecurrentToForgetWeights->GetTensorInfo();
890 paramsInfo.m_RecurrentToCellWeights =
891 &cLayer->m_QuantizedLstmParameters.m_RecurrentToCellWeights->GetTensorInfo();
892 paramsInfo.m_RecurrentToOutputWeights =
893 &cLayer->m_QuantizedLstmParameters.m_RecurrentToOutputWeights->GetTensorInfo();
James Conroyee18dc82019-07-17 11:27:46 +0100894
Ferran Balaguer737d9ff2019-08-01 09:58:08 +0100895 paramsInfo.m_InputGateBias =
896 &cLayer->m_QuantizedLstmParameters.m_InputGateBias->GetTensorInfo();
897 paramsInfo.m_ForgetGateBias =
898 &cLayer->m_QuantizedLstmParameters.m_ForgetGateBias->GetTensorInfo();
899 paramsInfo.m_CellBias =
900 &cLayer->m_QuantizedLstmParameters.m_CellBias->GetTensorInfo();
901 paramsInfo.m_OutputGateBias =
902 &cLayer->m_QuantizedLstmParameters.m_OutputGateBias->GetTensorInfo();;
James Conroyee18dc82019-07-17 11:27:46 +0100903
904 result = layerSupportObject->IsQuantizedLstmSupported(input,
905 previousCellStateIn,
906 previousOutputIn,
907 cellStateOut,
908 output,
909 paramsInfo,
910 reason);
911 break;
912 }
Francis Murtaghe7a86a42018-08-29 12:42:10 +0100913 case LayerType::Division:
914 {
915 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
916 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
917 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100918 result = layerSupportObject->IsDivisionSupported(
Francis Murtaghe7a86a42018-08-29 12:42:10 +0100919 OverrideDataType(input0, dataType),
920 OverrideDataType(input1, dataType),
921 OverrideDataType(output, dataType),
David Beck33f0ae02018-10-18 15:13:56 +0100922 reason);
Francis Murtaghe7a86a42018-08-29 12:42:10 +0100923 break;
924 }
Finn Williams2605b232020-06-10 15:53:46 +0100925 case LayerType::Rank:
926 {
927 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
928 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
929 result = layerSupportObject->IsRankSupported(OverrideDataType(input, dataType),
930 OverrideDataType(output, dataType),
931 reason);
932 break;
933 }
telsoa014fcda012018-03-09 14:13:49 +0000934 case LayerType::Reshape:
935 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100936 auto cLayer = PolymorphicDowncast<const ReshapeLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000937 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
Kevin Maya023c402019-12-12 17:28:05 +0000938 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Matteo Martincigh992d6dc2019-01-10 17:34:20 +0000939 result = layerSupportObject->IsReshapeSupported(OverrideDataType(input, dataType),
Kevin Maya023c402019-12-12 17:28:05 +0000940 OverrideDataType(output, dataType),
Matteo Martincigh992d6dc2019-01-10 17:34:20 +0000941 cLayer->GetParameters(),
942 reason);
telsoa014fcda012018-03-09 14:13:49 +0000943 break;
944 }
Teresa Charlina9075df2019-06-27 15:41:57 +0100945 case LayerType::Resize:
946 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100947 auto cLayer = PolymorphicDowncast<const ResizeLayer*>(&layer);
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +0100948 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
Teresa Charlina9075df2019-06-27 15:41:57 +0100949 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
950 result = layerSupportObject->IsResizeSupported(OverrideDataType(input, dataType),
951 OverrideDataType(output, dataType),
952 cLayer->GetParameters(),
953 reason);
954 break;
955 }
Aron Virginas-Tar636ab402019-09-16 14:27:45 +0100956 case LayerType::Slice:
957 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100958 auto cLayer = PolymorphicDowncast<const SliceLayer*>(&layer);
Aron Virginas-Tar636ab402019-09-16 14:27:45 +0100959
960 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
961 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
962
963 result = layerSupportObject->IsSliceSupported(OverrideDataType(input, dataType),
964 OverrideDataType(output, dataType),
965 cLayer->GetParameters(),
966 reason);
967 break;
968 }
telsoa014fcda012018-03-09 14:13:49 +0000969 case LayerType::Softmax:
970 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100971 auto cLayer = PolymorphicDowncast<const SoftmaxLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000972 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100973 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100974 result = layerSupportObject->IsSoftmaxSupported(OverrideDataType(input, dataType),
975 OverrideDataType(output, dataType),
976 cLayer->GetParameters(),
977 reason);
telsoa014fcda012018-03-09 14:13:49 +0000978 break;
979 }
Nattapat Chaimanowong207ef9a2018-11-02 10:57:25 +0000980 case LayerType::SpaceToBatchNd:
981 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100982 auto cLayer = PolymorphicDowncast<const SpaceToBatchNdLayer*>(&layer);
Nattapat Chaimanowong207ef9a2018-11-02 10:57:25 +0000983 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
984 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
985 result = layerSupportObject->IsSpaceToBatchNdSupported(OverrideDataType(input, dataType),
986 OverrideDataType(output, dataType),
987 cLayer->GetParameters(),
988 reason);
989 break;
990 }
Aron Virginas-Tar972af152019-06-11 14:14:03 +0100991 case LayerType::SpaceToDepth:
992 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100993 auto cLayer = PolymorphicDowncast<const SpaceToDepthLayer*>(&layer);
Aron Virginas-Tar972af152019-06-11 14:14:03 +0100994
995 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
996 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
997
998 result = layerSupportObject->IsSpaceToDepthSupported(OverrideDataType(input, dataType),
999 OverrideDataType(output, dataType),
1000 cLayer->GetParameters(),
1001 reason);
1002 break;
1003 }
telsoa014fcda012018-03-09 14:13:49 +00001004 case LayerType::Splitter:
1005 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001006 auto cLayer = PolymorphicDowncast<const SplitterLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +00001007 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
Narumol Prangnawarat15eb5832019-05-20 15:31:05 +01001008
1009 // Get vector of all outputs.
1010 auto getTensorInfo = [&dataType](const OutputSlot& slot)
1011 {
1012 return OverrideDataType(slot.GetTensorInfo(), dataType);
1013 };
1014 auto beginI = boost::make_transform_iterator(layer.GetOutputSlots().begin(), getTensorInfo);
1015 auto endI = boost::make_transform_iterator(layer.GetOutputSlots().end(), getTensorInfo);
1016 std::vector<TensorInfo> outputs(beginI, endI);
1017
1018 const std::vector<std::reference_wrapper<TensorInfo>> outputPtrs(outputs.begin(), outputs.end());
1019
David Beck33f0ae02018-10-18 15:13:56 +01001020 result = layerSupportObject->IsSplitterSupported(OverrideDataType(input, dataType),
Narumol Prangnawarat15eb5832019-05-20 15:31:05 +01001021 outputPtrs,
David Beck33f0ae02018-10-18 15:13:56 +01001022 cLayer->GetParameters(),
1023 reason);
telsoa014fcda012018-03-09 14:13:49 +00001024 break;
1025 }
Matthew Jackson2b8c1da2019-07-04 14:59:16 +01001026 case LayerType::Stack:
1027 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001028 auto cLayer = PolymorphicDowncast<const StackLayer*>(&layer);
Matthew Jackson2b8c1da2019-07-04 14:59:16 +01001029
1030 // Get vector of all inputs.
1031 auto getTensorInfo = [&dataType](const InputSlot& slot)
1032 {
1033 return OverrideDataType(slot.GetConnectedOutputSlot()->GetTensorInfo(), dataType);
1034 };
1035 auto beginI = boost::make_transform_iterator(layer.GetInputSlots().begin(), getTensorInfo);
1036 auto endI = boost::make_transform_iterator(layer.GetInputSlots().end(), getTensorInfo);
1037 std::vector<TensorInfo> inputs(beginI, endI);
1038
1039 auto getTensorInfoPtr = [](const TensorInfo& info)
1040 {
1041 return &info;
1042 };
1043 auto beginPtr = boost::make_transform_iterator(inputs.begin(), getTensorInfoPtr);
1044 auto endPtr = boost::make_transform_iterator(inputs.end(), getTensorInfoPtr);
1045 std::vector<const TensorInfo*> inputPtrs(beginPtr, endPtr);
1046
1047 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
1048
1049 result = layerSupportObject->IsStackSupported(inputPtrs, output, cLayer->GetParameters(), reason);
1050
1051 break;
1052 }
Derek Lamberti013c3902019-10-21 10:46:16 +01001053 case LayerType::StandIn:
1054 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001055 auto cLayer = PolymorphicDowncast<const StandInLayer*>(&layer);
Derek Lamberti013c3902019-10-21 10:46:16 +01001056
1057 // Get vector of all inputs.
1058 auto getTensorInfoIn = [&dataType](const InputSlot& slot)
1059 {
1060 return OverrideDataType(slot.GetConnectedOutputSlot()->GetTensorInfo(), dataType);
1061 };
1062 auto getTensorInfoOut = [&dataType](const OutputSlot& slot)
1063 {
1064 return OverrideDataType(slot.GetTensorInfo(), dataType);
1065 };
1066 auto beginI = boost::make_transform_iterator(layer.GetInputSlots().begin(), getTensorInfoIn);
1067 auto endI = boost::make_transform_iterator(layer.GetInputSlots().end(), getTensorInfoIn);
1068 std::vector<TensorInfo> inputs(beginI, endI);
1069
1070 auto beginO = boost::make_transform_iterator(layer.GetOutputSlots().begin(), getTensorInfoOut);
1071 auto endO = boost::make_transform_iterator(layer.GetOutputSlots().end(), getTensorInfoOut);
1072 std::vector<TensorInfo> outputs(beginO, endO);
1073
1074
1075 auto getTensorInfoPtr = [](const TensorInfo& info)
1076 {
1077 return &info;
1078 };
1079 auto beginPtrI = boost::make_transform_iterator(inputs.begin(), getTensorInfoPtr);
1080 auto endPtrI = boost::make_transform_iterator(inputs.end(), getTensorInfoPtr);
1081 std::vector<const TensorInfo*> inputPtrs(beginPtrI, endPtrI);
1082
1083 auto beginPtrO = boost::make_transform_iterator(outputs.begin(), getTensorInfoPtr);
1084 auto endPtrO = boost::make_transform_iterator(outputs.end(), getTensorInfoPtr);
1085 std::vector<const TensorInfo*> outputPtrs(beginPtrO, endPtrO);
1086
1087
1088 result = layerSupportObject->IsStandInSupported(inputPtrs,
1089 outputPtrs,
1090 cLayer->GetParameters(),
1091 reason);
1092 break;
1093 }
Conor Kennedy430b5d82018-11-14 15:28:28 +00001094 case LayerType::StridedSlice:
1095 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001096 auto cLayer = PolymorphicDowncast<const StridedSliceLayer*>(&layer);
Conor Kennedy430b5d82018-11-14 15:28:28 +00001097 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1098 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
1099 result = layerSupportObject->IsStridedSliceSupported(OverrideDataType(input, dataType),
1100 OverrideDataType(output, dataType),
1101 cLayer->GetParameters(),
1102 reason);
1103 break;
1104 }
David Beckc2044fe2018-09-05 15:00:38 +01001105 case LayerType::Subtraction:
1106 {
1107 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1108 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
1109 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +01001110 result = layerSupportObject->IsSubtractionSupported(
David Beckc2044fe2018-09-05 15:00:38 +01001111 OverrideDataType(input0, dataType),
1112 OverrideDataType(input1, dataType),
1113 OverrideDataType(output, dataType),
David Beck33f0ae02018-10-18 15:13:56 +01001114 reason);
David Beckc2044fe2018-09-05 15:00:38 +01001115 break;
1116 }
Sadik Armaganeff363d2019-04-05 15:25:46 +01001117 case LayerType::Switch:
1118 {
1119 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1120 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
1121 const TensorInfo& output0 = layer.GetOutputSlot(0).GetTensorInfo();
1122 const TensorInfo& output1 = layer.GetOutputSlot(1).GetTensorInfo();
1123 result = layerSupportObject->IsSwitchSupported(OverrideDataType(input0, dataType),
1124 OverrideDataType(input1, dataType),
1125 OverrideDataType(output0, dataType),
1126 OverrideDataType(output1, dataType),
1127 reason);
1128 break;
1129 }
narpra0132b90462018-09-13 11:07:48 +01001130 case LayerType::Mean:
1131 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001132 auto cLayer = PolymorphicDowncast<const MeanLayer*>(&layer);
narpra0132b90462018-09-13 11:07:48 +01001133 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1134 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +01001135 result = layerSupportObject->IsMeanSupported(
narpra0132b90462018-09-13 11:07:48 +01001136 OverrideDataType(input, dataType),
1137 OverrideDataType(output, dataType),
1138 cLayer->GetParameters(),
David Beck33f0ae02018-10-18 15:13:56 +01001139 reason);
narpra0132b90462018-09-13 11:07:48 +01001140 break;
1141 }
kevmay0190539692018-11-29 08:40:19 +00001142 case LayerType::Minimum:
1143 {
1144 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1145 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
1146 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
1147 result = layerSupportObject->IsMinimumSupported(OverrideDataType(input0, dataType),
1148 OverrideDataType(input1, dataType),
1149 OverrideDataType(output, dataType),
1150 reason);
1151 break;
1152 }
Matteo Martincigh0e406ee2019-06-12 15:42:18 +01001153 case LayerType::Prelu:
1154 {
1155 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1156 const TensorInfo& alpha = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
1157 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
1158 result = layerSupportObject->IsPreluSupported(OverrideDataType(input, dataType),
1159 OverrideDataType(alpha, dataType),
1160 OverrideDataType(output, dataType),
1161 reason);
1162 break;
1163 }
Mike Kellyc9ea45a2020-02-28 18:11:58 +00001164 case LayerType::Transpose:
1165 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001166 auto cLayer = PolymorphicDowncast<const TransposeLayer*>(&layer);
Mike Kellyc9ea45a2020-02-28 18:11:58 +00001167 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1168 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
1169 result = layerSupportObject->IsTransposeSupported(OverrideDataType(input, dataType),
1170 OverrideDataType(output, dataType),
1171 cLayer->GetParameters(),
1172 reason);
1173 break;
1174 }
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01001175 case LayerType::TransposeConvolution2d:
1176 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001177 auto cLayer = PolymorphicDowncast<const TransposeConvolution2dLayer*>(&layer);
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01001178
1179 const TensorInfo input = OverrideDataType(layer.GetInputSlot(0).GetConnection()->GetTensorInfo(),
1180 dataType);
1181 const TensorInfo output = OverrideDataType(layer.GetOutputSlot(0).GetTensorInfo(), dataType);
1182
1183 const TransposeConvolution2dDescriptor& descriptor = cLayer->GetParameters();
1184
1185 Optional<TensorInfo> biases;
1186 if (descriptor.m_BiasEnabled)
1187 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001188 ARMNN_ASSERT(cLayer->m_Bias.get() != nullptr);
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01001189 biases = OverrideDataType(cLayer->m_Bias->GetTensorInfo(),
1190 GetBiasTypeFromWeightsType(dataType));
1191 }
1192
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001193 ARMNN_ASSERT(cLayer->m_Weight.get() != nullptr);
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01001194 const TensorInfo weights = OverrideDataType(cLayer->m_Weight->GetTensorInfo(), dataType);
1195
1196 result = layerSupportObject->IsTransposeConvolution2dSupported(input,
1197 output,
1198 descriptor,
1199 weights,
1200 biases,
1201 reason);
1202
1203 break;
1204 }
telsoa014fcda012018-03-09 14:13:49 +00001205 default:
1206 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001207 ARMNN_ASSERT_MSG(false, "WorkloadFactory did not recognise type of layer.");
David Beck33f0ae02018-10-18 15:13:56 +01001208 reason.value() = "Unrecognised layer type";
telsoa014fcda012018-03-09 14:13:49 +00001209 result = false;
1210 break;
1211 }
1212 }
telsoa014fcda012018-03-09 14:13:49 +00001213 return result;
1214}
1215
Sadik Armagan045f6be2020-09-10 13:37:32 +01001216bool IWorkloadFactory::IsLayerSupported(const BackendId& backendId,
1217 const IConnectableLayer& connectableLayer,
1218 Optional<DataType> dataType,
1219 std::string& outReasonIfUnsupported)
1220{
1221 return IsLayerConfigurationSupported(backendId, connectableLayer, dataType, outReasonIfUnsupported);
1222}
1223
David Beckdcb751f2018-10-03 11:42:42 +01001224bool IWorkloadFactory::IsLayerSupported(const IConnectableLayer& connectableLayer,
David Beck29c75de2018-10-23 13:35:58 +01001225 Optional<DataType> dataType,
telsoa01c577f2c2018-08-31 09:22:23 +01001226 std::string& outReasonIfUnsupported)
telsoa014fcda012018-03-09 14:13:49 +00001227{
Jan Eilersbb446e52020-04-02 13:56:54 +01001228 auto layer = PolymorphicDowncast<const Layer*>(&connectableLayer);
Sadik Armagan045f6be2020-09-10 13:37:32 +01001229 return IsLayerConfigurationSupported(layer->GetBackendId(), connectableLayer, dataType, outReasonIfUnsupported);
1230}
1231
1232// TODO merge with defaulted modelOptions above
1233bool IWorkloadFactory::IsLayerSupported(const IConnectableLayer& connectableLayer,
1234 Optional<DataType> dataType,
1235 std::string& outReasonIfUnsupported,
1236 const ModelOptions& modelOptions)
1237{
1238 auto layer = PolymorphicDowncast<const Layer*>(&connectableLayer);
1239 return IsLayerConfigurationSupported(layer->GetBackendId(),
1240 connectableLayer,
1241 dataType,
1242 outReasonIfUnsupported,
1243 modelOptions);
telsoa014fcda012018-03-09 14:13:49 +00001244}
1245
Sadik Armagan04a72972020-09-14 15:44:18 +01001246bool IWorkloadFactory::IsLayerSupported(const BackendId& backendId,
1247 const IConnectableLayer& connectableLayer,
1248 Optional<DataType> dataType,
1249 std::string& outReasonIfUnsupported,
1250 const ModelOptions& modelOptions)
1251{
1252 return IsLayerConfigurationSupported(backendId,
1253 connectableLayer,
1254 dataType,
1255 outReasonIfUnsupported,
1256 modelOptions);
1257}
1258
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001259// Default Implementations
Derek Lamberti901ea112019-12-10 22:07:09 +00001260std::unique_ptr<IWorkload> IWorkloadFactory::CreateAbs(const AbsQueueDescriptor& /*descriptor*/,
1261 const WorkloadInfo& /*info*/) const
Kevin May868eb142019-09-04 17:29:31 +01001262{
1263 return std::unique_ptr<IWorkload>();
1264}
1265
Derek Lamberti901ea112019-12-10 22:07:09 +00001266std::unique_ptr<IWorkload> IWorkloadFactory::CreateActivation(const ActivationQueueDescriptor& /*descriptor*/,
1267 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001268{
1269 return std::unique_ptr<IWorkload>();
1270}
1271
Derek Lamberti901ea112019-12-10 22:07:09 +00001272std::unique_ptr<IWorkload> IWorkloadFactory::CreateAddition(const AdditionQueueDescriptor& /*descriptor*/,
1273 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001274{
1275 return std::unique_ptr<IWorkload>();
1276}
1277
Derek Lamberti901ea112019-12-10 22:07:09 +00001278std::unique_ptr<IWorkload> IWorkloadFactory::CreateArgMinMax(const ArgMinMaxQueueDescriptor& /*descriptor*/,
1279 const WorkloadInfo& /*info*/) const
Nikhil Rajee391d52019-09-05 17:50:44 +01001280{
1281 return std::unique_ptr<IWorkload>();
1282}
1283
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001284std::unique_ptr<IWorkload> IWorkloadFactory::CreateBatchNormalization(
Derek Lamberti901ea112019-12-10 22:07:09 +00001285 const BatchNormalizationQueueDescriptor& /*descriptor*/, const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001286{
1287 return std::unique_ptr<IWorkload>();
1288}
1289
Derek Lamberti901ea112019-12-10 22:07:09 +00001290std::unique_ptr<IWorkload> IWorkloadFactory::CreateBatchToSpaceNd(const BatchToSpaceNdQueueDescriptor& /*desc*/,
1291 const WorkloadInfo& /*Info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001292{
1293 return std::unique_ptr<IWorkload>();
1294}
1295
Derek Lamberti901ea112019-12-10 22:07:09 +00001296std::unique_ptr<IWorkload> IWorkloadFactory::CreateComparison(const ComparisonQueueDescriptor& /*descriptor*/,
1297 const WorkloadInfo& /*info*/) const
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001298{
1299 return std::unique_ptr<IWorkload>();
1300}
1301
Derek Lamberti901ea112019-12-10 22:07:09 +00001302std::unique_ptr<IWorkload> IWorkloadFactory::CreateConcat(const ConcatQueueDescriptor& /*descriptor*/,
1303 const WorkloadInfo& /*info*/) const
Jim Flynn4ed6c832019-05-20 11:02:46 +01001304{
1305 return std::unique_ptr<IWorkload>();
1306}
1307
Derek Lamberti901ea112019-12-10 22:07:09 +00001308std::unique_ptr<IWorkload> IWorkloadFactory::CreateConstant(const ConstantQueueDescriptor& /*descriptor*/,
1309 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001310{
1311 return std::unique_ptr<IWorkload>();
1312}
1313
Narumol Prangnawarat7ddbbae2020-03-13 10:26:05 +00001314std::unique_ptr<IWorkload> IWorkloadFactory::CreateConvertBf16ToFp32(const ConvertBf16ToFp32QueueDescriptor& /*desc*/,
1315 const WorkloadInfo& /*info*/) const
1316{
1317 return std::unique_ptr<IWorkload>();
1318}
1319
Derek Lamberti901ea112019-12-10 22:07:09 +00001320std::unique_ptr<IWorkload> IWorkloadFactory::CreateConvertFp16ToFp32(const ConvertFp16ToFp32QueueDescriptor& /*desc*/,
1321 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001322{
1323 return std::unique_ptr<IWorkload>();
1324}
1325
Narumol Prangnawaratea54a012020-03-16 16:36:10 +00001326std::unique_ptr<IWorkload> IWorkloadFactory::CreateConvertFp32ToBf16(const ConvertFp32ToBf16QueueDescriptor& /*desc*/,
1327 const WorkloadInfo& /*info*/) const
1328{
1329 return std::unique_ptr<IWorkload>();
1330}
1331
Derek Lamberti901ea112019-12-10 22:07:09 +00001332std::unique_ptr<IWorkload> IWorkloadFactory::CreateConvertFp32ToFp16(const ConvertFp32ToFp16QueueDescriptor& /*desc*/,
1333 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001334{
1335 return std::unique_ptr<IWorkload>();
1336}
1337
Derek Lamberti901ea112019-12-10 22:07:09 +00001338std::unique_ptr<IWorkload> IWorkloadFactory::CreateConvolution2d(const Convolution2dQueueDescriptor& /*descriptor*/,
1339 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001340{
1341 return std::unique_ptr<IWorkload>();
1342}
1343
Derek Lamberti901ea112019-12-10 22:07:09 +00001344std::unique_ptr<IWorkload> IWorkloadFactory::CreateDebug(const DebugQueueDescriptor& /*descriptor*/,
1345 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001346{
1347 return std::unique_ptr<IWorkload>();
1348}
1349
Derek Lamberti901ea112019-12-10 22:07:09 +00001350std::unique_ptr<IWorkload> IWorkloadFactory::CreateDepthToSpace(const DepthToSpaceQueueDescriptor& /*descriptor*/,
1351 const WorkloadInfo& /*info*/) const
Aron Virginas-Tardd6247f2019-09-19 14:31:17 +01001352{
1353 return std::unique_ptr<IWorkload>();
1354}
1355
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001356std::unique_ptr<IWorkload> IWorkloadFactory::CreateDepthwiseConvolution2d(
Derek Lamberti901ea112019-12-10 22:07:09 +00001357 const DepthwiseConvolution2dQueueDescriptor& /*descriptor*/, const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001358{
1359 return std::unique_ptr<IWorkload>();
1360}
1361
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00001362std::unique_ptr<IWorkload> IWorkloadFactory::CreateDequantize(
Derek Lamberti901ea112019-12-10 22:07:09 +00001363 const DequantizeQueueDescriptor& /*descriptor*/, const WorkloadInfo& /*info*/) const
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00001364{
1365 return std::unique_ptr<IWorkload>();
1366}
1367
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001368std::unique_ptr<IWorkload> IWorkloadFactory::CreateDetectionPostProcess(
Derek Lamberti901ea112019-12-10 22:07:09 +00001369 const DetectionPostProcessQueueDescriptor& /*descriptor*/, const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001370{
1371 return std::unique_ptr<IWorkload>();
1372}
1373
Derek Lamberti901ea112019-12-10 22:07:09 +00001374std::unique_ptr<IWorkload> IWorkloadFactory::CreateDivision(const DivisionQueueDescriptor& /*descriptor*/,
1375 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001376{
1377 return std::unique_ptr<IWorkload>();
1378}
1379
josh minor4a3c6102020-01-06 16:40:46 -06001380std::unique_ptr<IWorkload> IWorkloadFactory::CreateElementwiseUnary(const ElementwiseUnaryQueueDescriptor& /*desc*/,
1381 const WorkloadInfo& /*info*/) const
1382{
1383 return std::unique_ptr<IWorkload>();
1384}
1385
Derek Lamberti901ea112019-12-10 22:07:09 +00001386std::unique_ptr<IWorkload> IWorkloadFactory::CreateEqual(const EqualQueueDescriptor& /*descriptor*/,
1387 const WorkloadInfo& /*Info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001388{
1389 return std::unique_ptr<IWorkload>();
1390}
1391
Derek Lamberti901ea112019-12-10 22:07:09 +00001392std::unique_ptr<IWorkload> IWorkloadFactory::CreateFakeQuantization(const FakeQuantizationQueueDescriptor& /*desc*/,
1393 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001394{
1395 return std::unique_ptr<IWorkload>();
1396}
1397
Ryan OSheaec6c6802020-06-05 17:17:06 +01001398std::unique_ptr<IWorkload> IWorkloadFactory::CreateFill(const FillQueueDescriptor& /*descriptor*/,
1399 const WorkloadInfo& /*info*/) const
1400{
1401 return std::unique_ptr<IWorkload>();
1402}
1403
Derek Lamberti901ea112019-12-10 22:07:09 +00001404std::unique_ptr<IWorkload> IWorkloadFactory::CreateFloor(const FloorQueueDescriptor& /*descriptor*/,
1405 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001406{
1407 return std::unique_ptr<IWorkload>();
1408}
1409
Derek Lamberti901ea112019-12-10 22:07:09 +00001410std::unique_ptr<IWorkload> IWorkloadFactory::CreateFullyConnected(const FullyConnectedQueueDescriptor& /*descriptor*/,
1411 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001412{
1413 return std::unique_ptr<IWorkload>();
1414}
1415
Derek Lamberti901ea112019-12-10 22:07:09 +00001416std::unique_ptr<IWorkload> IWorkloadFactory::CreateGather(const GatherQueueDescriptor& /*descriptor*/,
1417 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001418{
1419 return std::unique_ptr<IWorkload>();
1420}
1421
Derek Lamberti901ea112019-12-10 22:07:09 +00001422std::unique_ptr<IWorkload> IWorkloadFactory::CreateGreater(const GreaterQueueDescriptor& /*descriptor*/,
1423 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001424{
1425 return std::unique_ptr<IWorkload>();
1426}
1427
Kevin Mayce5045a2019-10-02 14:07:47 +01001428std::unique_ptr<IWorkload> IWorkloadFactory::CreateInstanceNormalization(
Derek Lamberti901ea112019-12-10 22:07:09 +00001429 const InstanceNormalizationQueueDescriptor& /*descriptor*/,
1430 const WorkloadInfo& /*info*/) const
Kevin Mayce5045a2019-10-02 14:07:47 +01001431{
1432 return std::unique_ptr<IWorkload>();
1433}
1434
Derek Lamberti901ea112019-12-10 22:07:09 +00001435std::unique_ptr<IWorkload> IWorkloadFactory::CreateL2Normalization(const L2NormalizationQueueDescriptor& /*desc*/,
1436 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001437{
1438 return std::unique_ptr<IWorkload>();
1439}
1440
Derek Lamberti901ea112019-12-10 22:07:09 +00001441std::unique_ptr<IWorkload> IWorkloadFactory::CreateLogSoftmax(const LogSoftmaxQueueDescriptor& /*descriptor*/,
1442 const WorkloadInfo& /*info*/) const
Aron Virginas-Tarf982dea2019-10-11 14:07:53 +01001443{
1444 return std::unique_ptr<IWorkload>();
1445}
1446
Derek Lamberti901ea112019-12-10 22:07:09 +00001447std::unique_ptr<IWorkload> IWorkloadFactory::CreateLstm(const LstmQueueDescriptor& /*descriptor*/,
1448 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001449{
1450 return std::unique_ptr<IWorkload>();
1451}
1452
Derek Lamberti901ea112019-12-10 22:07:09 +00001453std::unique_ptr<IWorkload> IWorkloadFactory::CreateMaximum(const MaximumQueueDescriptor& /*descriptor*/,
1454 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001455{
1456 return std::unique_ptr<IWorkload>();
1457}
1458
Derek Lamberti901ea112019-12-10 22:07:09 +00001459std::unique_ptr<IWorkload> IWorkloadFactory::CreateMean(const MeanQueueDescriptor& /*descriptor*/,
1460 const WorkloadInfo& /*Info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001461{
1462 return std::unique_ptr<IWorkload>();
1463}
1464
Derek Lamberti901ea112019-12-10 22:07:09 +00001465std::unique_ptr<IWorkload> IWorkloadFactory::CreateMemCopy(const MemCopyQueueDescriptor& /*descriptor*/,
1466 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001467{
1468 return std::unique_ptr<IWorkload>();
1469}
1470
Derek Lamberti901ea112019-12-10 22:07:09 +00001471std::unique_ptr<IWorkload> IWorkloadFactory::CreateMemImport(const MemImportQueueDescriptor& /*descriptor*/,
1472 const WorkloadInfo& /*info*/) const
Derek Lambertif674aa02019-08-01 15:56:25 +01001473{
1474 return std::unique_ptr<IWorkload>();
1475}
1476
Derek Lamberti901ea112019-12-10 22:07:09 +00001477std::unique_ptr<IWorkload> IWorkloadFactory::CreateMerge(const MergeQueueDescriptor& /*descriptor*/,
1478 const WorkloadInfo& /*info*/) const
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01001479{
1480 return std::unique_ptr<IWorkload>();
1481}
1482
Derek Lamberti901ea112019-12-10 22:07:09 +00001483std::unique_ptr<IWorkload> IWorkloadFactory::CreateMerger(const MergerQueueDescriptor& /*descriptor*/,
1484 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001485{
1486 return std::unique_ptr<IWorkload>();
1487}
1488
Derek Lamberti901ea112019-12-10 22:07:09 +00001489std::unique_ptr<IWorkload> IWorkloadFactory::CreateMinimum(const MinimumQueueDescriptor& /*descriptor*/,
1490 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001491{
1492 return std::unique_ptr<IWorkload>();
1493}
1494
Derek Lamberti901ea112019-12-10 22:07:09 +00001495std::unique_ptr<IWorkload> IWorkloadFactory::CreateMultiplication(const MultiplicationQueueDescriptor& /*descriptor*/,
1496 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001497{
1498 return std::unique_ptr<IWorkload>();
1499}
1500
Derek Lamberti901ea112019-12-10 22:07:09 +00001501std::unique_ptr<IWorkload> IWorkloadFactory::CreateNormalization(const NormalizationQueueDescriptor& /*descriptor*/,
1502 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001503{
1504 return std::unique_ptr<IWorkload>();
1505}
1506
Derek Lamberti901ea112019-12-10 22:07:09 +00001507std::unique_ptr<IWorkload> IWorkloadFactory::CreateOutput(const OutputQueueDescriptor& /*descriptor*/,
1508 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001509{
1510 return std::unique_ptr<IWorkload>();
1511}
1512
Derek Lamberti901ea112019-12-10 22:07:09 +00001513std::unique_ptr<IWorkload> IWorkloadFactory::CreatePad(const PadQueueDescriptor& /*descriptor*/,
1514 const WorkloadInfo& /*Info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001515{
1516 return std::unique_ptr<IWorkload>();
1517}
1518
Derek Lamberti901ea112019-12-10 22:07:09 +00001519std::unique_ptr<IWorkload> IWorkloadFactory::CreatePermute(const PermuteQueueDescriptor& /*descriptor*/,
Mike Kellyc9ea45a2020-02-28 18:11:58 +00001520 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001521{
1522 return std::unique_ptr<IWorkload>();
1523}
1524
Derek Lamberti901ea112019-12-10 22:07:09 +00001525std::unique_ptr<IWorkload> IWorkloadFactory::CreatePooling2d(const Pooling2dQueueDescriptor& /*descriptor*/,
1526 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001527{
1528 return std::unique_ptr<IWorkload>();
1529}
1530
Derek Lamberti901ea112019-12-10 22:07:09 +00001531std::unique_ptr<IWorkload> IWorkloadFactory::CreatePreCompiled(const PreCompiledQueueDescriptor& /*descriptor*/,
1532 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001533{
1534 return std::unique_ptr<IWorkload>();
1535}
1536
Derek Lamberti901ea112019-12-10 22:07:09 +00001537std::unique_ptr<IWorkload> IWorkloadFactory::CreatePrelu(const PreluQueueDescriptor &/*descriptor*/,
1538 const WorkloadInfo &/*info*/) const
Matteo Martincigh0e406ee2019-06-12 15:42:18 +01001539{
1540 return std::unique_ptr<IWorkload>();
1541}
1542
Derek Lamberti901ea112019-12-10 22:07:09 +00001543std::unique_ptr<IWorkload> IWorkloadFactory::CreateQuantize(const QuantizeQueueDescriptor& /*descriptor*/,
1544 const WorkloadInfo& /*Info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001545{
1546 return std::unique_ptr<IWorkload>();
1547}
1548
James Conroy586a9aa2020-03-20 08:49:33 +00001549std::unique_ptr<IWorkload> IWorkloadFactory::CreateQLstm(const QLstmQueueDescriptor& /*descriptor*/,
1550 const WorkloadInfo& /*info*/) const
1551{
1552 return std::unique_ptr<IWorkload>();
1553}
1554
Derek Lamberti901ea112019-12-10 22:07:09 +00001555std::unique_ptr<IWorkload> IWorkloadFactory::CreateQuantizedLstm(const QuantizedLstmQueueDescriptor& /*descriptor*/,
1556 const WorkloadInfo& /*info*/) const
James Conroyee18dc82019-07-17 11:27:46 +01001557{
1558 return std::unique_ptr<IWorkload>();
1559}
Finn Williams2605b232020-06-10 15:53:46 +01001560std::unique_ptr<IWorkload> IWorkloadFactory::CreateRank(const RankQueueDescriptor& /*descriptor*/,
1561 const WorkloadInfo& /*info*/) const
1562{
1563 return std::unique_ptr<IWorkload>();
1564}
James Conroyee18dc82019-07-17 11:27:46 +01001565
Derek Lamberti901ea112019-12-10 22:07:09 +00001566std::unique_ptr<IWorkload> IWorkloadFactory::CreateReshape(const ReshapeQueueDescriptor& /*descriptor*/,
1567 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001568{
1569 return std::unique_ptr<IWorkload>();
1570}
1571
Derek Lamberti901ea112019-12-10 22:07:09 +00001572std::unique_ptr<IWorkload> IWorkloadFactory::CreateResizeBilinear(const ResizeBilinearQueueDescriptor& /*descriptor*/,
1573 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001574{
1575 return std::unique_ptr<IWorkload>();
1576}
1577
Derek Lamberti901ea112019-12-10 22:07:09 +00001578std::unique_ptr<IWorkload> IWorkloadFactory::CreateResize(const ResizeQueueDescriptor& /*descriptor*/,
1579 const WorkloadInfo& /*info*/) const
Teresa Charlina9075df2019-06-27 15:41:57 +01001580{
1581 return std::unique_ptr<IWorkload>();
1582}
1583
Derek Lamberti901ea112019-12-10 22:07:09 +00001584std::unique_ptr<IWorkload> IWorkloadFactory::CreateRsqrt(const RsqrtQueueDescriptor& /*descriptor*/,
1585 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001586{
1587 return std::unique_ptr<IWorkload>();
1588}
1589
Derek Lamberti901ea112019-12-10 22:07:09 +00001590std::unique_ptr<IWorkload> IWorkloadFactory::CreateSlice(const SliceQueueDescriptor& /*descriptor*/,
1591 const WorkloadInfo& /*info*/) const
1592{
1593 return std::unique_ptr<IWorkload>();
1594}
Mike Kellyc9ea45a2020-02-28 18:11:58 +00001595
Derek Lamberti901ea112019-12-10 22:07:09 +00001596std::unique_ptr<IWorkload> IWorkloadFactory::CreateSoftmax(const SoftmaxQueueDescriptor& /*descriptor*/,
1597 const WorkloadInfo& /*info*/) const
Aron Virginas-Tar636ab402019-09-16 14:27:45 +01001598{
1599 return std::unique_ptr<IWorkload>();
1600}
1601
Derek Lamberti901ea112019-12-10 22:07:09 +00001602std::unique_ptr<IWorkload> IWorkloadFactory::CreateSplitter(const SplitterQueueDescriptor& /*descriptor*/,
1603 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001604{
1605 return std::unique_ptr<IWorkload>();
1606}
1607
Derek Lamberti901ea112019-12-10 22:07:09 +00001608std::unique_ptr<IWorkload> IWorkloadFactory::CreateSpaceToBatchNd(const SpaceToBatchNdQueueDescriptor& /*descriptor*/,
1609 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001610{
1611 return std::unique_ptr<IWorkload>();
1612}
1613
Derek Lamberti901ea112019-12-10 22:07:09 +00001614std::unique_ptr<IWorkload> IWorkloadFactory::CreateSpaceToDepth(const SpaceToDepthQueueDescriptor& /*descriptor*/,
1615 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001616{
1617 return std::unique_ptr<IWorkload>();
1618}
1619
Derek Lamberti901ea112019-12-10 22:07:09 +00001620std::unique_ptr<IWorkload> IWorkloadFactory::CreateStack(const StackQueueDescriptor& /*descriptor*/,
1621 const WorkloadInfo& /*info*/) const
Aron Virginas-Tar972af152019-06-11 14:14:03 +01001622{
1623 return std::unique_ptr<IWorkload>();
1624}
1625
Derek Lamberti901ea112019-12-10 22:07:09 +00001626std::unique_ptr<IWorkload> IWorkloadFactory::CreateStridedSlice(const StridedSliceQueueDescriptor& /*descriptor*/,
1627 const WorkloadInfo& /*info*/) const
Matthew Jackson2b8c1da2019-07-04 14:59:16 +01001628{
1629 return std::unique_ptr<IWorkload>();
1630}
1631
Derek Lamberti901ea112019-12-10 22:07:09 +00001632std::unique_ptr<IWorkload> IWorkloadFactory::CreateSubtraction(const SubtractionQueueDescriptor& /*descriptor*/,
1633 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001634{
1635 return std::unique_ptr<IWorkload>();
1636}
1637
Derek Lamberti901ea112019-12-10 22:07:09 +00001638std::unique_ptr<IWorkload> IWorkloadFactory::CreateSwitch(const SwitchQueueDescriptor& /*descriptor*/,
1639 const WorkloadInfo& /*info*/) const
Sadik Armaganeff363d2019-04-05 15:25:46 +01001640{
1641 return std::unique_ptr<IWorkload>();
1642}
1643
Mike Kellyc9ea45a2020-02-28 18:11:58 +00001644std::unique_ptr<IWorkload> IWorkloadFactory::CreateTranspose(const TransposeQueueDescriptor& /*descriptor*/,
1645 const WorkloadInfo& /*info*/) const
1646{
1647 return std::unique_ptr<IWorkload>();
1648}
1649
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01001650std::unique_ptr<IWorkload> IWorkloadFactory::CreateTransposeConvolution2d(
Derek Lamberti901ea112019-12-10 22:07:09 +00001651 const TransposeConvolution2dQueueDescriptor& /*descriptor*/,
1652 const WorkloadInfo& /*info*/) const
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01001653{
1654 return std::unique_ptr<IWorkload>();
surmeh013537c2c2018-05-18 16:31:43 +01001655}
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01001656
} // namespace armnn