//
// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include <Layer.hpp>
#include <LayersFwd.hpp>

#include <armnn/Types.hpp>
#include <armnn/LayerSupport.hpp>
#include <armnn/ILayerSupport.hpp>
#include <armnn/BackendRegistry.hpp>
#include <armnn/utility/PolymorphicDowncast.hpp>

#include <backendsCommon/WorkloadFactory.hpp>
#include <backendsCommon/CpuTensorHandle.hpp>

#include <backendsCommon/test/WorkloadTestUtils.hpp>

#include <boost/iterator/transform_iterator.hpp>

#include <sstream>

namespace armnn
{

namespace
{

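// Returns a copy of the given TensorInfo with its data type replaced by the requested one,
// or the original TensorInfo unchanged when no override is supplied.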
const TensorInfo OverrideDataType(const TensorInfo& info, Optional<DataType> type)
{
    if (!type)
    {
        return info;
    }

    return TensorInfo(info.GetShape(), type.value(), info.GetQuantizationScale(), info.GetQuantizationOffset());
}

} // anonymous namespace

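// Asks the backend identified by backendId whether it can execute the given layer, optionally
// overriding the tensor data types, and writes the backend's reason into outReasonIfUnsupported
// when the layer is not supported.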
bool IWorkloadFactory::IsLayerSupported(const BackendId& backendId,
                                        const IConnectableLayer& connectableLayer,
                                        Optional<DataType> dataType,
                                        std::string& outReasonIfUnsupported)
{
    Optional<std::string&> reason = outReasonIfUnsupported;
    bool result;
    const Layer& layer = *(PolymorphicDowncast<const Layer*>(&connectableLayer));

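    // The backend must be present in the BackendRegistry before its layer support can be queried.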
    auto const& backendRegistry = BackendRegistryInstance();
    if (!backendRegistry.IsBackendRegistered(backendId))
    {
        std::stringstream ss;
        ss << connectableLayer.GetName() << " is not supported on " << backendId
           << " because this backend is not registered.";

        outReasonIfUnsupported = ss.str();
        return false;
    }

    auto backendFactory = backendRegistry.GetFactory(backendId);
    auto backendObject = backendFactory();
    auto layerSupportObject = backendObject->GetLayerSupport();

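    // Each case below gathers the layer's input/output TensorInfos (plus any layer-held weights and
    // descriptor parameters) and forwards them to the matching ILayerSupport query on the backend.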
    switch(layer.GetType())
    {
        case LayerType::Activation:
        {
            auto cLayer = PolymorphicDowncast<const ActivationLayer*>(&layer);
            const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
            const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
            result = layerSupportObject->IsActivationSupported(
                                             OverrideDataType(input, dataType),
                                             OverrideDataType(output, dataType),
                                             cLayer->GetParameters(),
                                             reason);
            break;
        }
        case LayerType::Addition:
        {
            const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
            const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
            const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
            result = layerSupportObject->IsAdditionSupported(
                                             OverrideDataType(input0, dataType),
                                             OverrideDataType(input1, dataType),
                                             OverrideDataType(output, dataType),
                                             reason);
            break;
        }
        case LayerType::ArgMinMax:
        {
            auto cLayer = PolymorphicDowncast<const ArgMinMaxLayer*>(&layer);
            const ArgMinMaxDescriptor& descriptor = cLayer->GetParameters();

            const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
            const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
            result = layerSupportObject->IsArgMinMaxSupported(
                                             OverrideDataType(input, dataType),
                                             OverrideDataType(output, DataType::Signed32),
                                             descriptor,
                                             reason);
            break;
        }
        case LayerType::BatchNormalization:
        {
            auto cLayer = PolymorphicDowncast<const BatchNormalizationLayer*>(&layer);
            const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
            const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
            const TensorInfo& mean = cLayer->m_Mean->GetTensorInfo();
            const TensorInfo& var = cLayer->m_Variance->GetTensorInfo();
            const TensorInfo& beta = cLayer->m_Beta->GetTensorInfo();
            const TensorInfo& gamma = cLayer->m_Gamma->GetTensorInfo();
            result = layerSupportObject->IsBatchNormalizationSupported(
                                             OverrideDataType(input, dataType),
                                             OverrideDataType(output, dataType),
                                             OverrideDataType(mean, dataType),
                                             OverrideDataType(var, dataType),
                                             OverrideDataType(beta, dataType),
                                             OverrideDataType(gamma, dataType),
                                             cLayer->GetParameters(),
                                             reason);
            break;
        }
        case LayerType::BatchToSpaceNd:
        {
            const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
            const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
            auto cLayer = PolymorphicDowncast<const BatchToSpaceNdLayer*>(&layer);

            result = layerSupportObject->IsBatchToSpaceNdSupported(OverrideDataType(input, dataType),
                                                                   OverrideDataType(output, dataType),
                                                                   cLayer->GetParameters(),
                                                                   reason);
            break;
        }
        case LayerType::Comparison:
        {
            auto cLayer = PolymorphicDowncast<const ComparisonLayer*>(&layer);

            const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
            const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
            const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();

            result = layerSupportObject->IsComparisonSupported(OverrideDataType(input0, dataType),
                                                               OverrideDataType(input1, dataType),
                                                               OverrideDataType(output, DataType::Boolean),
                                                               cLayer->GetParameters(),
                                                               reason);
            break;
        }
        case LayerType::Constant:
        {
            const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
            result = layerSupportObject->IsConstantSupported(OverrideDataType(output, dataType), reason);
            break;
        }
        case LayerType::ConvertBf16ToFp32:
        {
            const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
            const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
            result = layerSupportObject->IsConvertBf16ToFp32Supported(input, output, reason);
            break;
        }
        case LayerType::ConvertFp16ToFp32:
        {
            const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
            const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
            result = layerSupportObject->IsConvertFp16ToFp32Supported(input, output, reason);
            break;
        }
        case LayerType::ConvertFp32ToBf16:
        {
            const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
            const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
            result = layerSupportObject->IsConvertFp32ToBf16Supported(input, output, reason);
            break;
        }
        case LayerType::ConvertFp32ToFp16:
        {
            const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
            const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
            result = layerSupportObject->IsConvertFp32ToFp16Supported(input, output, reason);
            break;
        }
        case LayerType::Convolution2d:
        {
            auto cLayer = PolymorphicDowncast<const Convolution2dLayer*>(&layer);

            const TensorInfo input = OverrideDataType(layer.GetInputSlot(0).GetConnection()->GetTensorInfo(),
                                                      dataType);
            const TensorInfo output = OverrideDataType(layer.GetOutputSlot(0).GetTensorInfo(), dataType);
            ARMNN_ASSERT(cLayer->m_Weight.get() != nullptr);

            const Convolution2dDescriptor& descriptor = cLayer->GetParameters();

            // Construct optional biases object based on the value of m_BiasEnabled
            Optional<TensorInfo> biases;
            if (descriptor.m_BiasEnabled)
            {
                biases =
                    OverrideDataType(cLayer->m_Bias->GetTensorInfo(), GetBiasTypeFromWeightsType(dataType));
            }

            result = layerSupportObject->IsConvolution2dSupported(
                                             input,
                                             output,
                                             descriptor,
                                             OverrideDataType(cLayer->m_Weight->GetTensorInfo(), dataType),
                                             biases,
                                             reason);
            break;
        }
        case LayerType::Debug:
        {
            const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
            const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();

            result = layerSupportObject->IsDebugSupported(OverrideDataType(input, dataType),
                                                          OverrideDataType(output, dataType),
                                                          reason);
            break;
        }
        case LayerType::DepthToSpace:
        {
            auto cLayer = PolymorphicDowncast<const DepthToSpaceLayer*>(&layer);

            const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
            const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();

            result = layerSupportObject->IsDepthToSpaceSupported(OverrideDataType(input, dataType),
                                                                 OverrideDataType(output, dataType),
                                                                 cLayer->GetParameters(),
                                                                 reason);
            break;
        }
        case LayerType::DepthwiseConvolution2d:
        {
            auto cLayer = PolymorphicDowncast<const DepthwiseConvolution2dLayer*>(&layer);
            const TensorInfo& input = OverrideDataType(layer.GetInputSlot(0).GetConnection()->GetTensorInfo(),
                                                       dataType);
            const TensorInfo& output = OverrideDataType(layer.GetOutputSlot(0).GetTensorInfo(), dataType);
            ARMNN_ASSERT(cLayer->m_Weight.get() != nullptr);

            const DepthwiseConvolution2dDescriptor& descriptor = cLayer->GetParameters();

            // Construct optional biases object based on the value of m_BiasEnabled
            Optional<TensorInfo> biases;
            if (descriptor.m_BiasEnabled)
            {
                biases =
                    OverrideDataType(cLayer->m_Bias->GetTensorInfo(), GetBiasTypeFromWeightsType(dataType));
            }

            result = layerSupportObject->IsDepthwiseConvolutionSupported(
                                             input,
                                             output,
                                             descriptor,
                                             OverrideDataType(cLayer->m_Weight->GetTensorInfo(), dataType),
                                             biases,
                                             reason);
            break;
        }
        case LayerType::Dequantize:
        {
            const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
            const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();

            result = layerSupportObject->IsDequantizeSupported(input,
                                                               OverrideDataType(output, dataType),
                                                               reason);
            break;
        }
        case LayerType::DetectionPostProcess:
        {
            auto cLayer = PolymorphicDowncast<const DetectionPostProcessLayer*>(&layer);
            const TensorInfo& boxEncodings = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
            const TensorInfo& scores = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
            const TensorInfo& anchors = cLayer->m_Anchors->GetTensorInfo();

            const TensorInfo& detectionBoxes = layer.GetOutputSlot(0).GetTensorInfo();
            const TensorInfo& detectionClasses = layer.GetOutputSlot(1).GetTensorInfo();
            const TensorInfo& detectionScores = layer.GetOutputSlot(2).GetTensorInfo();
            const TensorInfo& numDetections = layer.GetOutputSlot(3).GetTensorInfo();

            const DetectionPostProcessDescriptor& descriptor = cLayer->GetParameters();
            result = layerSupportObject->IsDetectionPostProcessSupported(boxEncodings,
                                                                         scores,
                                                                         anchors,
                                                                         detectionBoxes,
                                                                         detectionClasses,
                                                                         detectionScores,
                                                                         numDetections,
                                                                         descriptor,
                                                                         reason);
            break;
        }
        case LayerType::ElementwiseUnary:
        {
            auto cLayer = PolymorphicDowncast<const ElementwiseUnaryLayer*>(&layer);

            const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
            const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();

            result = layerSupportObject->IsElementwiseUnarySupported(OverrideDataType(input, dataType),
                                                                     OverrideDataType(output, dataType),
                                                                     cLayer->GetParameters(),
                                                                     reason);
            break;
        }
        case LayerType::Fill:
        {
            auto cLayer = PolymorphicDowncast<const FillLayer*>(&layer);
            const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
            const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
            const FillDescriptor& descriptor = cLayer->GetParameters();

            result = layerSupportObject->IsFillSupported(
                                             OverrideDataType(input, dataType),
                                             OverrideDataType(output, dataType),
                                             descriptor,
                                             reason);
            break;
        }
        case LayerType::FakeQuantization:
        {
            auto cLayer = PolymorphicDowncast<const FakeQuantizationLayer*>(&layer);
            const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
            result = layerSupportObject->IsFakeQuantizationSupported(OverrideDataType(input, dataType),
                                                                     cLayer->GetParameters(),
                                                                     reason);
            break;
        }
        case LayerType::Floor:
        {
            const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
            const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
            result = layerSupportObject->IsFloorSupported(OverrideDataType(input, dataType),
                                                          OverrideDataType(output, dataType),
                                                          reason);
            break;
        }
        case LayerType::FullyConnected:
        {
            auto cLayer = PolymorphicDowncast<const FullyConnectedLayer*>(&layer);
            const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
            const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
            ARMNN_ASSERT(cLayer->m_Weight.get() != nullptr);

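            // The support query always takes a bias TensorInfo: use the layer's real bias when biases
            // are enabled, otherwise fall back to a dummy TensorInfo of the matching data type below.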
351 TensorInfo biasInfo;
352 const TensorInfo * biasInfoPtr = nullptr;
Narumol Prangnawarat44179c32020-03-11 14:51:27 +0000353 static const TensorInfo dummyBFloat16Bias(TensorShape({1,1,1,1}), DataType::BFloat16);
telsoa01c577f2c2018-08-31 09:22:23 +0100354 static const TensorInfo dummyFloat16Bias(TensorShape({1,1,1,1}), DataType::Float16);
355 static const TensorInfo dummyFloat32Bias(TensorShape({1,1,1,1}), DataType::Float32);
356 static const TensorInfo dummyQA8Bias(TensorShape({1,1,1,1}), DataType::Signed32);
357
358 const FullyConnectedDescriptor& descriptor = cLayer->GetParameters();
359 if (descriptor.m_BiasEnabled)
360 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +0100361 ARMNN_ASSERT(cLayer->m_Bias.get() != nullptr);
telsoa01c577f2c2018-08-31 09:22:23 +0100362 biasInfo = OverrideDataType(cLayer->m_Bias->GetTensorInfo(), GetBiasTypeFromWeightsType(dataType));
363 biasInfoPtr = &biasInfo;
364 }
365 else
366 {
367 // If biases are not enabled pass a dummy tensorinfo for the validation
368 switch(input.GetDataType())
369 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +0000370 case DataType::BFloat16:
371 {
372 biasInfoPtr = &dummyBFloat16Bias;
373 break;
374 }
telsoa01c577f2c2018-08-31 09:22:23 +0100375 case DataType::Float16:
376 {
377 biasInfoPtr = &dummyFloat16Bias;
378 break;
379 }
380 case DataType::Float32:
381 {
382 biasInfoPtr = &dummyFloat32Bias;
383 break;
384 }
Derek Lambertif90c56d2020-01-10 17:14:08 +0000385 case DataType::QAsymmU8:
Keith Davisa8565012020-02-14 12:22:40 +0000386 case DataType::QAsymmS8:
Keith Davis9d0ff742020-02-03 14:47:54 +0000387 case DataType::QSymmS8:
Derek Lambertif90c56d2020-01-10 17:14:08 +0000388 case DataType::QSymmS16:
telsoa01c577f2c2018-08-31 09:22:23 +0100389 {
390 biasInfoPtr = &dummyQA8Bias;
391 break;
392 }
393 default:
394 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +0100395 ARMNN_ASSERT_MSG(false, "Unexpected bias type");
telsoa01c577f2c2018-08-31 09:22:23 +0100396 }
397 }
398 }
399
David Beck33f0ae02018-10-18 15:13:56 +0100400 result = layerSupportObject->IsFullyConnectedSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100401 OverrideDataType(input, dataType),
402 OverrideDataType(output, dataType),
403 OverrideDataType(cLayer->m_Weight->GetTensorInfo(), dataType),
404 *biasInfoPtr,
405 descriptor,
David Beck33f0ae02018-10-18 15:13:56 +0100406 reason);
telsoa014fcda012018-03-09 14:13:49 +0000407 break;
408 }
narpra01b89b05f2019-01-16 09:53:09 +0000409 case LayerType::Gather:
410 {
411 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
412 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
413 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Teresa Charlin52664732020-06-29 16:27:03 +0100414 auto cLayer = PolymorphicDowncast<const GatherLayer*>(&layer);
415 const GatherDescriptor& descriptor = cLayer->GetParameters();
narpra01b89b05f2019-01-16 09:53:09 +0000416 result = layerSupportObject->IsGatherSupported(OverrideDataType(input0, dataType),
Ellen Norris-Thompsone0dbedf2019-06-24 09:23:38 +0100417 input1,
narpra01b89b05f2019-01-16 09:53:09 +0000418 OverrideDataType(output, dataType),
Teresa Charlin52664732020-06-29 16:27:03 +0100419 descriptor,
narpra01b89b05f2019-01-16 09:53:09 +0000420 reason);
421 break;
422 }
telsoa014fcda012018-03-09 14:13:49 +0000423 case LayerType::Input:
424 {
425 const TensorInfo& input = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100426 result = layerSupportObject->IsInputSupported(OverrideDataType(input, dataType), reason);
telsoa014fcda012018-03-09 14:13:49 +0000427 break;
428 }
Kevin Mayce5045a2019-10-02 14:07:47 +0100429 case LayerType::InstanceNormalization:
430 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100431 auto cLayer = PolymorphicDowncast<const InstanceNormalizationLayer*>(&layer);
Kevin Mayce5045a2019-10-02 14:07:47 +0100432 const InstanceNormalizationDescriptor& descriptor = cLayer->GetParameters();
433
434 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
435 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
436
437 result = layerSupportObject->IsInstanceNormalizationSupported(
438 OverrideDataType(input, dataType),
439 OverrideDataType(output, dataType),
440 descriptor,
441 reason);
442 break;
443 }
telsoa014fcda012018-03-09 14:13:49 +0000444 case LayerType::L2Normalization:
445 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100446 auto cLayer = PolymorphicDowncast<const L2NormalizationLayer*>(&layer);
Matteo Martincighbcd3c852018-09-28 14:14:12 +0100447 const L2NormalizationDescriptor& descriptor = cLayer->GetParameters();
448
telsoa014fcda012018-03-09 14:13:49 +0000449 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100450 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Matteo Martincighbcd3c852018-09-28 14:14:12 +0100451
David Beck33f0ae02018-10-18 15:13:56 +0100452 result = layerSupportObject->IsL2NormalizationSupported(
Matteo Martincighbcd3c852018-09-28 14:14:12 +0100453 OverrideDataType(input, dataType),
454 OverrideDataType(output, dataType),
455 descriptor,
David Beck33f0ae02018-10-18 15:13:56 +0100456 reason);
telsoa01c577f2c2018-08-31 09:22:23 +0100457 break;
458 }
Aron Virginas-Tarf982dea2019-10-11 14:07:53 +0100459 case LayerType::LogSoftmax:
460 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100461 auto cLayer = PolymorphicDowncast<const LogSoftmaxLayer*>(&layer);
Aron Virginas-Tarf982dea2019-10-11 14:07:53 +0100462
463 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
464 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
465
466 result = layerSupportObject->IsLogSoftmaxSupported(OverrideDataType(input, dataType),
467 OverrideDataType(output, dataType),
468 cLayer->GetParameters(),
469 reason);
470 break;
471 }
telsoa01c577f2c2018-08-31 09:22:23 +0100472 case LayerType::Lstm:
473 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100474 auto cLayer = PolymorphicDowncast<const LstmLayer*>(&layer);
telsoa01c577f2c2018-08-31 09:22:23 +0100475 const LstmDescriptor& descriptor = cLayer->GetParameters();
476
477 // All inputs.
478 const TensorInfo& input = OverrideDataType(layer.GetInputSlot(0).GetConnection()->GetTensorInfo(),
479 dataType);
480 const TensorInfo& outputStateIn = OverrideDataType(layer.GetInputSlot(1).GetConnection()->GetTensorInfo(),
481 dataType);
482 const TensorInfo& cellStateIn = OverrideDataType(layer.GetInputSlot(2).GetConnection()->GetTensorInfo(),
483 dataType);
484 // All outputs
485 const TensorInfo& scratchBuffer = OverrideDataType(layer.GetOutputSlot(0).GetTensorInfo(), dataType);
486 const TensorInfo& outputStateOut = OverrideDataType(layer.GetOutputSlot(1).GetTensorInfo(), dataType);
487 const TensorInfo& cellStateOut = OverrideDataType(layer.GetOutputSlot(2).GetTensorInfo(), dataType);
488 const TensorInfo& output = OverrideDataType(layer.GetOutputSlot(3).GetTensorInfo(), dataType);
489
490 // Basic parameters
491 const TensorInfo& inputToForgetWeights
492 = OverrideDataType(cLayer->m_BasicParameters.m_InputToForgetWeights->GetTensorInfo(), dataType);
493 const TensorInfo& inputToCellWeights
494 = OverrideDataType(cLayer->m_BasicParameters.m_InputToCellWeights->GetTensorInfo(), dataType);
495 const TensorInfo& inputToOutputWeights
496 = OverrideDataType(cLayer->m_BasicParameters.m_InputToOutputWeights->GetTensorInfo(), dataType);
497 const TensorInfo& recurrentToForgetWeights
498 = OverrideDataType(cLayer->m_BasicParameters.m_RecurrentToForgetWeights->GetTensorInfo(), dataType);
499 const TensorInfo& recurrentToCellWeights
500 = OverrideDataType(cLayer->m_BasicParameters.m_RecurrentToCellWeights->GetTensorInfo(), dataType);
501 const TensorInfo& recurrentToOutputWeights
502 = OverrideDataType(cLayer->m_BasicParameters.m_RecurrentToOutputWeights->GetTensorInfo(), dataType);
503 const TensorInfo& forgetGateBias
504 = OverrideDataType(cLayer->m_BasicParameters.m_ForgetGateBias->GetTensorInfo(), dataType);
505 const TensorInfo& cellBias
506 = OverrideDataType(cLayer->m_BasicParameters.m_CellBias->GetTensorInfo(), dataType);
507 const TensorInfo& outputGateBias
508 = OverrideDataType(cLayer->m_BasicParameters.m_OutputGateBias->GetTensorInfo(), dataType);
509
Jan Eilersd01a83c2019-07-03 18:20:40 +0100510 LstmInputParamsInfo paramsInfo;
telsoa01c577f2c2018-08-31 09:22:23 +0100511
Jan Eilersd01a83c2019-07-03 18:20:40 +0100512 paramsInfo.m_InputToForgetWeights = &inputToForgetWeights;
513 paramsInfo.m_InputToCellWeights = &inputToCellWeights;
514 paramsInfo.m_InputToOutputWeights = &inputToOutputWeights;
515 paramsInfo.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
516 paramsInfo.m_RecurrentToCellWeights = &recurrentToCellWeights;
517 paramsInfo.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
518 paramsInfo.m_ForgetGateBias = &forgetGateBias;
519 paramsInfo.m_CellBias = &cellBias;
520 paramsInfo.m_OutputGateBias = &outputGateBias;
521
522
523 // Optional parameters
telsoa01c577f2c2018-08-31 09:22:23 +0100524 TensorInfo optInputToInputWeights;
525 TensorInfo optRecurrentToInputWeights;
526 TensorInfo optCellToInputWeights;
527 TensorInfo optInputGateBias;
528 TensorInfo optProjectionWeights;
529 TensorInfo optProjectionBias;
530 TensorInfo optCellToForgetWeights;
531 TensorInfo optCellToOutputWeights;
Jan Eilers38e05bd2019-06-26 13:10:09 +0100532 TensorInfo optInputLayerNormWeights;
533 TensorInfo optForgetLayerNormWeights;
534 TensorInfo optCellLayerNormWeights;
535 TensorInfo optOutputLayerNormWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100536
537 if(!descriptor.m_CifgEnabled)
538 {
539 optInputToInputWeights =
540 OverrideDataType(cLayer->m_CifgParameters.m_InputToInputWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100541 paramsInfo.m_InputToInputWeights = &optInputToInputWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100542
543 optRecurrentToInputWeights =
544 OverrideDataType(cLayer->m_CifgParameters.m_RecurrentToInputWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100545 paramsInfo.m_RecurrentToInputWeights = &optRecurrentToInputWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100546 optInputGateBias =
547 OverrideDataType(cLayer->m_CifgParameters.m_InputGateBias->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100548 paramsInfo.m_InputGateBias = &optInputGateBias;
telsoa01c577f2c2018-08-31 09:22:23 +0100549 }
550
551 if(descriptor.m_ProjectionEnabled)
552 {
553 optProjectionWeights =
554 OverrideDataType(cLayer->m_ProjectionParameters.m_ProjectionWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100555 paramsInfo.m_ProjectionWeights = &optProjectionWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100556 if (cLayer->m_ProjectionParameters.m_ProjectionBias != nullptr)
557 {
558 optProjectionBias =
559 OverrideDataType(cLayer->m_ProjectionParameters.m_ProjectionBias->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100560 paramsInfo.m_ProjectionBias = &optProjectionBias;
telsoa01c577f2c2018-08-31 09:22:23 +0100561 }
562 }
563
564 if(descriptor.m_PeepholeEnabled)
565 {
Jan Eilerse2062cd2020-03-30 15:07:45 +0100566 if(!descriptor.m_CifgEnabled)
567 {
568 optCellToInputWeights =
569 OverrideDataType(cLayer->m_PeepholeParameters.m_CellToInputWeights->GetTensorInfo(),
570 dataType);
571 paramsInfo.m_CellToInputWeights = &optCellToInputWeights;
572 }
telsoa01c577f2c2018-08-31 09:22:23 +0100573 optCellToForgetWeights =
574 OverrideDataType(cLayer->m_PeepholeParameters.m_CellToForgetWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100575 paramsInfo.m_CellToForgetWeights = &optCellToForgetWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100576 optCellToOutputWeights =
577 OverrideDataType(cLayer->m_PeepholeParameters.m_CellToOutputWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100578 paramsInfo.m_CellToOutputWeights = &optCellToOutputWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100579 }
580
Jan Eilers38e05bd2019-06-26 13:10:09 +0100581 if(descriptor.m_LayerNormEnabled)
582 {
Ferran Balaguere30c16e2019-07-24 17:03:45 +0100583 if (!descriptor.m_CifgEnabled)
584 {
585 optInputLayerNormWeights = OverrideDataType(
586 cLayer->m_LayerNormParameters.m_InputLayerNormWeights->GetTensorInfo(), dataType);
587 paramsInfo.m_InputLayerNormWeights = &optInputLayerNormWeights;
588 }
Jan Eilers38e05bd2019-06-26 13:10:09 +0100589
590 optForgetLayerNormWeights = OverrideDataType(
591 cLayer->m_LayerNormParameters.m_ForgetLayerNormWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100592 paramsInfo.m_ForgetLayerNormWeights = &optForgetLayerNormWeights;
Jan Eilers38e05bd2019-06-26 13:10:09 +0100593
594 optCellLayerNormWeights = OverrideDataType(
595 cLayer->m_LayerNormParameters.m_CellLayerNormWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100596 paramsInfo.m_CellLayerNormWeights = &optCellLayerNormWeights;
Jan Eilers38e05bd2019-06-26 13:10:09 +0100597
598 optOutputLayerNormWeights = OverrideDataType(
599 cLayer->m_LayerNormParameters.m_OutputLayerNormWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100600 paramsInfo.m_OutputLayerNormWeights = &optOutputLayerNormWeights;
Jan Eilers38e05bd2019-06-26 13:10:09 +0100601 }
602
David Beck33f0ae02018-10-18 15:13:56 +0100603 result = layerSupportObject->IsLstmSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100604 input,
605 outputStateIn,
606 cellStateIn,
607 scratchBuffer,
608 outputStateOut,
609 cellStateOut,
610 output,
611 descriptor,
Jan Eilersd01a83c2019-07-03 18:20:40 +0100612 paramsInfo,
613 reason);
telsoa014fcda012018-03-09 14:13:49 +0000614 break;
615 }
Nattapat Chaimanowong5a4304a2018-11-28 10:44:37 +0000616 case LayerType::Maximum:
617 {
618 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
619 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
620 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
621
622 result = layerSupportObject->IsMaximumSupported(OverrideDataType(input0, dataType),
623 OverrideDataType(input1, dataType),
624 OverrideDataType(output, dataType),
625 reason);
626 break;
627 }
narpra01b89b05f2019-01-16 09:53:09 +0000628 case LayerType::MemCopy:
629 {
630 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
631 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Matteo Martincigh992d6dc2019-01-10 17:34:20 +0000632
narpra01b89b05f2019-01-16 09:53:09 +0000633 result = layerSupportObject->IsMemCopySupported(OverrideDataType(input, dataType),
634 OverrideDataType(output, dataType),
635 reason);
636 break;
637 }
Derek Lambertif674aa02019-08-01 15:56:25 +0100638 case LayerType::MemImport:
639 {
640 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
641 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
642
643 result = layerSupportObject->IsMemImportSupported(OverrideDataType(input, dataType),
644 OverrideDataType(output, dataType),
645 reason);
646 break;
647 }
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +0100648 case LayerType::Merge:
649 {
650 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
651 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
652 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
653
654 result = layerSupportObject->IsMergeSupported(OverrideDataType(input0, dataType),
655 OverrideDataType(input1, dataType),
656 OverrideDataType(output, dataType),
657 reason);
658 break;
659 }
Jim Flynne242f2d2019-05-22 14:24:13 +0100660 case LayerType::Concat:
telsoa014fcda012018-03-09 14:13:49 +0000661 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100662 auto cLayer = PolymorphicDowncast<const ConcatLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000663
telsoa01c577f2c2018-08-31 09:22:23 +0100664 // Get vector of all inputs.
665 auto getTensorInfo = [&dataType](const InputSlot& slot)
telsoa014fcda012018-03-09 14:13:49 +0000666 {
telsoa01c577f2c2018-08-31 09:22:23 +0100667 return OverrideDataType(slot.GetConnectedOutputSlot()->GetTensorInfo(), dataType);
telsoa014fcda012018-03-09 14:13:49 +0000668 };
telsoa01c577f2c2018-08-31 09:22:23 +0100669 auto beginI = boost::make_transform_iterator(layer.GetInputSlots().begin(), getTensorInfo);
670 auto endI = boost::make_transform_iterator(layer.GetInputSlots().end(), getTensorInfo);
671 std::vector<TensorInfo> inputs(beginI, endI);
telsoa014fcda012018-03-09 14:13:49 +0000672
telsoa01c577f2c2018-08-31 09:22:23 +0100673 auto getTensorInfoPtr = [](const TensorInfo& info)
674 {
675 return &info;
676 };
677 auto beginPtr = boost::make_transform_iterator(inputs.begin(), getTensorInfoPtr);
678 auto endPtr = boost::make_transform_iterator(inputs.end(), getTensorInfoPtr);
679 std::vector<const TensorInfo*> inputPtrs(beginPtr, endPtr);
telsoa014fcda012018-03-09 14:13:49 +0000680
Nikhil Raj8599a412018-11-19 14:51:07 +0000681 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
682
Jim Flynne242f2d2019-05-22 14:24:13 +0100683 result = layerSupportObject->IsConcatSupported(inputPtrs, output, cLayer->GetParameters(), reason);
684
685
telsoa014fcda012018-03-09 14:13:49 +0000686 break;
687 }
688 case LayerType::Multiplication:
689 {
690 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
691 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100692 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100693 result = layerSupportObject->IsMultiplicationSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100694 OverrideDataType(input0, dataType),
695 OverrideDataType(input1, dataType),
696 OverrideDataType(output, dataType),
David Beck33f0ae02018-10-18 15:13:56 +0100697 reason);
telsoa014fcda012018-03-09 14:13:49 +0000698 break;
699 }
700 case LayerType::Normalization:
701 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100702 auto cLayer = PolymorphicDowncast<const NormalizationLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000703 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
704 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100705 result = layerSupportObject->IsNormalizationSupported(OverrideDataType(input, dataType),
706 OverrideDataType(output, dataType),
707 cLayer->GetParameters(),
708 reason);
telsoa014fcda012018-03-09 14:13:49 +0000709 break;
710 }
711 case LayerType::Output:
712 {
713 const TensorInfo& output = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100714 result = layerSupportObject->IsOutputSupported(OverrideDataType(output, dataType), reason);
telsoa014fcda012018-03-09 14:13:49 +0000715 break;
716 }
717 case LayerType::Permute:
718 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100719 auto cLayer = PolymorphicDowncast<const PermuteLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000720 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
721 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100722 result = layerSupportObject->IsPermuteSupported(OverrideDataType(input, dataType),
723 OverrideDataType(output, dataType),
724 cLayer->GetParameters(),
725 reason);
telsoa014fcda012018-03-09 14:13:49 +0000726 break;
727 }
Mohamed Nour Abouelseoud5662c202018-09-24 13:30:09 +0100728 case LayerType::Pad:
729 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100730 auto cLayer = PolymorphicDowncast<const PadLayer*>(&layer);
Mohamed Nour Abouelseoud5662c202018-09-24 13:30:09 +0100731 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
732 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100733 result = layerSupportObject->IsPadSupported(
Mohamed Nour Abouelseoud5662c202018-09-24 13:30:09 +0100734 OverrideDataType(input, dataType),
735 OverrideDataType(output, dataType),
736 cLayer->GetParameters(),
David Beck33f0ae02018-10-18 15:13:56 +0100737 reason);
Mohamed Nour Abouelseoud5662c202018-09-24 13:30:09 +0100738 break;
739 }
telsoa014fcda012018-03-09 14:13:49 +0000740 case LayerType::Pooling2d:
741 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100742 auto cLayer = PolymorphicDowncast<const Pooling2dLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000743 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
744 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100745 result = layerSupportObject->IsPooling2dSupported(OverrideDataType(input, dataType),
746 OverrideDataType(output, dataType),
747 cLayer->GetParameters(),
748 reason);
telsoa014fcda012018-03-09 14:13:49 +0000749 break;
750 }
Matteo Martincigh49124022019-01-11 13:25:59 +0000751 case LayerType::PreCompiled:
752 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100753 auto cLayer = PolymorphicDowncast<const PreCompiledLayer*>(&layer);
Matteo Martincigh49124022019-01-11 13:25:59 +0000754 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
755 result = layerSupportObject->IsPreCompiledSupported(OverrideDataType(input, dataType),
756 cLayer->GetParameters(),
757 reason);
758 break;
759 }
Derek Lambertia9cca6a2019-03-25 15:41:58 +0000760 case LayerType::Quantize:
761 {
762 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
763 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
764 result = layerSupportObject->IsQuantizeSupported(input, output, reason);
765 break;
766 }
James Conroy586a9aa2020-03-20 08:49:33 +0000767 case LayerType::QLstm:
768 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100769 auto cLayer = PolymorphicDowncast<const QLstmLayer*>(&layer);
James Conroy586a9aa2020-03-20 08:49:33 +0000770 const QLstmDescriptor& descriptor = cLayer->GetParameters();
771
772 // Inputs
773 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
774 const TensorInfo& previousOutputIn = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
775 const TensorInfo& previousCellStateIn = layer.GetInputSlot(2).GetConnection()->GetTensorInfo();
776
777 // Outputs
778 const TensorInfo& outputStateOut = layer.GetOutputSlot(0).GetTensorInfo();
779 const TensorInfo& cellStateOut = layer.GetOutputSlot(1).GetTensorInfo();
780 const TensorInfo& output = layer.GetOutputSlot(2).GetTensorInfo();
781
782 // Lstm parameters
783 LstmInputParamsInfo paramsInfo;
784
785 // Basic parameters
786 paramsInfo.m_InputToForgetWeights = &cLayer->m_BasicParameters.m_InputToForgetWeights->GetTensorInfo();
787 paramsInfo.m_InputToCellWeights = &cLayer->m_BasicParameters.m_InputToCellWeights->GetTensorInfo();
788 paramsInfo.m_InputToOutputWeights = &cLayer->m_BasicParameters.m_InputToOutputWeights->GetTensorInfo();
789
790 paramsInfo.m_RecurrentToForgetWeights =
791 &cLayer->m_BasicParameters.m_RecurrentToForgetWeights->GetTensorInfo();
792 paramsInfo.m_RecurrentToCellWeights =
793 &cLayer->m_BasicParameters.m_RecurrentToCellWeights->GetTensorInfo();
794 paramsInfo.m_RecurrentToOutputWeights =
795 &cLayer->m_BasicParameters.m_RecurrentToOutputWeights->GetTensorInfo();
796
797 paramsInfo.m_ForgetGateBias = &cLayer->m_BasicParameters.m_ForgetGateBias->GetTensorInfo();
798 paramsInfo.m_CellBias = &cLayer->m_BasicParameters.m_CellBias->GetTensorInfo();
799 paramsInfo.m_OutputGateBias = &cLayer->m_BasicParameters.m_OutputGateBias->GetTensorInfo();
800
801 if(!descriptor.m_CifgEnabled)
802 {
803 paramsInfo.m_InputToInputWeights = &cLayer->m_CifgParameters.m_InputToInputWeights->GetTensorInfo();
804 paramsInfo.m_RecurrentToInputWeights =
805 &cLayer->m_CifgParameters.m_RecurrentToInputWeights->GetTensorInfo();
806 paramsInfo.m_InputGateBias = &cLayer->m_CifgParameters.m_InputGateBias->GetTensorInfo();
807 }
808
809 if(descriptor.m_ProjectionEnabled)
810 {
811 paramsInfo.m_ProjectionWeights = &cLayer->m_ProjectionParameters.m_ProjectionWeights->GetTensorInfo();
James Conroyed324052020-05-18 15:16:42 +0100812
813 // Projection bias is optional even if projection is enabled
814 if (cLayer->m_ProjectionParameters.m_ProjectionBias != nullptr)
815 {
816 paramsInfo.m_ProjectionBias = &cLayer->m_ProjectionParameters.m_ProjectionBias->GetTensorInfo();
817 }
James Conroy586a9aa2020-03-20 08:49:33 +0000818 }
819
820 if(descriptor.m_PeepholeEnabled)
821 {
822 if (!descriptor.m_CifgEnabled)
823 {
824 paramsInfo.m_CellToInputWeights =
825 &cLayer->m_PeepholeParameters.m_CellToInputWeights->GetTensorInfo();
826 }
827
828 paramsInfo.m_CellToForgetWeights =
829 &cLayer->m_PeepholeParameters.m_CellToForgetWeights->GetTensorInfo();
830 paramsInfo.m_CellToOutputWeights = &cLayer->m_PeepholeParameters.m_CellToOutputWeights->GetTensorInfo();
831 }
832
833 if(descriptor.m_LayerNormEnabled)
834 {
835 if (!descriptor.m_CifgEnabled)
836 {
837 paramsInfo.m_InputLayerNormWeights =
838 &cLayer->m_LayerNormParameters.m_InputLayerNormWeights->GetTensorInfo();
839 }
840
841 paramsInfo.m_ForgetLayerNormWeights =
842 &cLayer->m_LayerNormParameters.m_ForgetLayerNormWeights->GetTensorInfo();
843 paramsInfo.m_CellLayerNormWeights =
844 &cLayer->m_LayerNormParameters.m_CellLayerNormWeights->GetTensorInfo();
845 paramsInfo.m_OutputLayerNormWeights =
846 &cLayer->m_LayerNormParameters.m_OutputLayerNormWeights->GetTensorInfo();
847 }
848
849 result = layerSupportObject->IsQLstmSupported(input,
850 previousOutputIn,
851 previousCellStateIn,
852 outputStateOut,
853 cellStateOut,
854 output,
855 descriptor,
856 paramsInfo,
857 reason);
858 break;
859 }
James Conroyee18dc82019-07-17 11:27:46 +0100860 case LayerType::QuantizedLstm:
861 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100862 auto cLayer = PolymorphicDowncast<const QuantizedLstmLayer*>(&layer);
James Conroyee18dc82019-07-17 11:27:46 +0100863
864 // Inputs
Ferran Balaguer737d9ff2019-08-01 09:58:08 +0100865 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
866 const TensorInfo& previousCellStateIn = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
867 const TensorInfo& previousOutputIn = layer.GetInputSlot(2).GetConnection()->GetTensorInfo();
James Conroyee18dc82019-07-17 11:27:46 +0100868
869 // Outputs
Ferran Balaguer737d9ff2019-08-01 09:58:08 +0100870 const TensorInfo& cellStateOut = layer.GetOutputSlot(0).GetTensorInfo();
871 const TensorInfo& output = layer.GetOutputSlot(1).GetTensorInfo();
James Conroyee18dc82019-07-17 11:27:46 +0100872
873 // QuantizedLstm parameters
James Conroyee18dc82019-07-17 11:27:46 +0100874 QuantizedLstmInputParamsInfo paramsInfo;
875
Ferran Balaguer737d9ff2019-08-01 09:58:08 +0100876 paramsInfo.m_InputToInputWeights =
877 &cLayer->m_QuantizedLstmParameters.m_InputToInputWeights->GetTensorInfo();
878 paramsInfo.m_InputToForgetWeights =
879 &cLayer->m_QuantizedLstmParameters.m_InputToForgetWeights->GetTensorInfo();
880 paramsInfo.m_InputToCellWeights =
881 &cLayer->m_QuantizedLstmParameters.m_InputToCellWeights->GetTensorInfo();
882 paramsInfo.m_InputToOutputWeights =
883 &cLayer->m_QuantizedLstmParameters.m_InputToOutputWeights->GetTensorInfo();
James Conroyee18dc82019-07-17 11:27:46 +0100884
Ferran Balaguer737d9ff2019-08-01 09:58:08 +0100885 paramsInfo.m_RecurrentToInputWeights =
886 &cLayer->m_QuantizedLstmParameters.m_RecurrentToInputWeights->GetTensorInfo();
887 paramsInfo.m_RecurrentToForgetWeights =
888 &cLayer->m_QuantizedLstmParameters.m_RecurrentToForgetWeights->GetTensorInfo();
889 paramsInfo.m_RecurrentToCellWeights =
890 &cLayer->m_QuantizedLstmParameters.m_RecurrentToCellWeights->GetTensorInfo();
891 paramsInfo.m_RecurrentToOutputWeights =
892 &cLayer->m_QuantizedLstmParameters.m_RecurrentToOutputWeights->GetTensorInfo();
James Conroyee18dc82019-07-17 11:27:46 +0100893
Ferran Balaguer737d9ff2019-08-01 09:58:08 +0100894 paramsInfo.m_InputGateBias =
895 &cLayer->m_QuantizedLstmParameters.m_InputGateBias->GetTensorInfo();
896 paramsInfo.m_ForgetGateBias =
897 &cLayer->m_QuantizedLstmParameters.m_ForgetGateBias->GetTensorInfo();
898 paramsInfo.m_CellBias =
899 &cLayer->m_QuantizedLstmParameters.m_CellBias->GetTensorInfo();
900 paramsInfo.m_OutputGateBias =
901 &cLayer->m_QuantizedLstmParameters.m_OutputGateBias->GetTensorInfo();;
James Conroyee18dc82019-07-17 11:27:46 +0100902
903 result = layerSupportObject->IsQuantizedLstmSupported(input,
904 previousCellStateIn,
905 previousOutputIn,
906 cellStateOut,
907 output,
908 paramsInfo,
909 reason);
910 break;
911 }
Francis Murtaghe7a86a42018-08-29 12:42:10 +0100912 case LayerType::Division:
913 {
914 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
915 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
916 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100917 result = layerSupportObject->IsDivisionSupported(
Francis Murtaghe7a86a42018-08-29 12:42:10 +0100918 OverrideDataType(input0, dataType),
919 OverrideDataType(input1, dataType),
920 OverrideDataType(output, dataType),
David Beck33f0ae02018-10-18 15:13:56 +0100921 reason);
Francis Murtaghe7a86a42018-08-29 12:42:10 +0100922 break;
923 }
Finn Williams2605b232020-06-10 15:53:46 +0100924 case LayerType::Rank:
925 {
926 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
927 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
928 result = layerSupportObject->IsRankSupported(OverrideDataType(input, dataType),
929 OverrideDataType(output, dataType),
930 reason);
931 break;
932 }
telsoa014fcda012018-03-09 14:13:49 +0000933 case LayerType::Reshape:
934 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100935 auto cLayer = PolymorphicDowncast<const ReshapeLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000936 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
Kevin Maya023c402019-12-12 17:28:05 +0000937 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Matteo Martincigh992d6dc2019-01-10 17:34:20 +0000938 result = layerSupportObject->IsReshapeSupported(OverrideDataType(input, dataType),
Kevin Maya023c402019-12-12 17:28:05 +0000939 OverrideDataType(output, dataType),
Matteo Martincigh992d6dc2019-01-10 17:34:20 +0000940 cLayer->GetParameters(),
941 reason);
telsoa014fcda012018-03-09 14:13:49 +0000942 break;
943 }
Teresa Charlina9075df2019-06-27 15:41:57 +0100944 case LayerType::Resize:
945 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100946 auto cLayer = PolymorphicDowncast<const ResizeLayer*>(&layer);
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +0100947 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
Teresa Charlina9075df2019-06-27 15:41:57 +0100948 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
949 result = layerSupportObject->IsResizeSupported(OverrideDataType(input, dataType),
950 OverrideDataType(output, dataType),
951 cLayer->GetParameters(),
952 reason);
953 break;
954 }
Aron Virginas-Tar636ab402019-09-16 14:27:45 +0100955 case LayerType::Slice:
956 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100957 auto cLayer = PolymorphicDowncast<const SliceLayer*>(&layer);
Aron Virginas-Tar636ab402019-09-16 14:27:45 +0100958
959 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
960 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
961
962 result = layerSupportObject->IsSliceSupported(OverrideDataType(input, dataType),
963 OverrideDataType(output, dataType),
964 cLayer->GetParameters(),
965 reason);
966 break;
967 }
telsoa014fcda012018-03-09 14:13:49 +0000968 case LayerType::Softmax:
969 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100970 auto cLayer = PolymorphicDowncast<const SoftmaxLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000971 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100972 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100973 result = layerSupportObject->IsSoftmaxSupported(OverrideDataType(input, dataType),
974 OverrideDataType(output, dataType),
975 cLayer->GetParameters(),
976 reason);
telsoa014fcda012018-03-09 14:13:49 +0000977 break;
978 }
Nattapat Chaimanowong207ef9a2018-11-02 10:57:25 +0000979 case LayerType::SpaceToBatchNd:
980 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100981 auto cLayer = PolymorphicDowncast<const SpaceToBatchNdLayer*>(&layer);
Nattapat Chaimanowong207ef9a2018-11-02 10:57:25 +0000982 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
983 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
984 result = layerSupportObject->IsSpaceToBatchNdSupported(OverrideDataType(input, dataType),
985 OverrideDataType(output, dataType),
986 cLayer->GetParameters(),
987 reason);
988 break;
989 }
Aron Virginas-Tar972af152019-06-11 14:14:03 +0100990 case LayerType::SpaceToDepth:
991 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100992 auto cLayer = PolymorphicDowncast<const SpaceToDepthLayer*>(&layer);
Aron Virginas-Tar972af152019-06-11 14:14:03 +0100993
994 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
995 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
996
997 result = layerSupportObject->IsSpaceToDepthSupported(OverrideDataType(input, dataType),
998 OverrideDataType(output, dataType),
999 cLayer->GetParameters(),
1000 reason);
1001 break;
1002 }
telsoa014fcda012018-03-09 14:13:49 +00001003 case LayerType::Splitter:
1004 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001005 auto cLayer = PolymorphicDowncast<const SplitterLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +00001006 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
Narumol Prangnawarat15eb5832019-05-20 15:31:05 +01001007
1008 // Get vector of all outputs.
1009 auto getTensorInfo = [&dataType](const OutputSlot& slot)
1010 {
1011 return OverrideDataType(slot.GetTensorInfo(), dataType);
1012 };
1013 auto beginI = boost::make_transform_iterator(layer.GetOutputSlots().begin(), getTensorInfo);
1014 auto endI = boost::make_transform_iterator(layer.GetOutputSlots().end(), getTensorInfo);
1015 std::vector<TensorInfo> outputs(beginI, endI);
1016
1017 const std::vector<std::reference_wrapper<TensorInfo>> outputPtrs(outputs.begin(), outputs.end());
1018
David Beck33f0ae02018-10-18 15:13:56 +01001019 result = layerSupportObject->IsSplitterSupported(OverrideDataType(input, dataType),
Narumol Prangnawarat15eb5832019-05-20 15:31:05 +01001020 outputPtrs,
David Beck33f0ae02018-10-18 15:13:56 +01001021 cLayer->GetParameters(),
1022 reason);
telsoa014fcda012018-03-09 14:13:49 +00001023 break;
1024 }
Matthew Jackson2b8c1da2019-07-04 14:59:16 +01001025 case LayerType::Stack:
1026 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001027 auto cLayer = PolymorphicDowncast<const StackLayer*>(&layer);
Matthew Jackson2b8c1da2019-07-04 14:59:16 +01001028
1029 // Get vector of all inputs.
1030 auto getTensorInfo = [&dataType](const InputSlot& slot)
1031 {
1032 return OverrideDataType(slot.GetConnectedOutputSlot()->GetTensorInfo(), dataType);
1033 };
1034 auto beginI = boost::make_transform_iterator(layer.GetInputSlots().begin(), getTensorInfo);
1035 auto endI = boost::make_transform_iterator(layer.GetInputSlots().end(), getTensorInfo);
1036 std::vector<TensorInfo> inputs(beginI, endI);
1037
1038 auto getTensorInfoPtr = [](const TensorInfo& info)
1039 {
1040 return &info;
1041 };
1042 auto beginPtr = boost::make_transform_iterator(inputs.begin(), getTensorInfoPtr);
1043 auto endPtr = boost::make_transform_iterator(inputs.end(), getTensorInfoPtr);
1044 std::vector<const TensorInfo*> inputPtrs(beginPtr, endPtr);
1045
1046 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
1047
1048 result = layerSupportObject->IsStackSupported(inputPtrs, output, cLayer->GetParameters(), reason);
1049
1050 break;
1051 }
Derek Lamberti013c3902019-10-21 10:46:16 +01001052 case LayerType::StandIn:
1053 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001054 auto cLayer = PolymorphicDowncast<const StandInLayer*>(&layer);
Derek Lamberti013c3902019-10-21 10:46:16 +01001055
            // Get vectors of all inputs and outputs.
            auto getTensorInfoIn = [&dataType](const InputSlot& slot)
            {
                return OverrideDataType(slot.GetConnectedOutputSlot()->GetTensorInfo(), dataType);
            };
            auto getTensorInfoOut = [&dataType](const OutputSlot& slot)
            {
                return OverrideDataType(slot.GetTensorInfo(), dataType);
            };
            auto beginI = boost::make_transform_iterator(layer.GetInputSlots().begin(), getTensorInfoIn);
            auto endI = boost::make_transform_iterator(layer.GetInputSlots().end(), getTensorInfoIn);
            std::vector<TensorInfo> inputs(beginI, endI);

            auto beginO = boost::make_transform_iterator(layer.GetOutputSlots().begin(), getTensorInfoOut);
            auto endO = boost::make_transform_iterator(layer.GetOutputSlots().end(), getTensorInfoOut);
            std::vector<TensorInfo> outputs(beginO, endO);

            auto getTensorInfoPtr = [](const TensorInfo& info)
            {
                return &info;
            };
            auto beginPtrI = boost::make_transform_iterator(inputs.begin(), getTensorInfoPtr);
            auto endPtrI = boost::make_transform_iterator(inputs.end(), getTensorInfoPtr);
            std::vector<const TensorInfo*> inputPtrs(beginPtrI, endPtrI);

            auto beginPtrO = boost::make_transform_iterator(outputs.begin(), getTensorInfoPtr);
            auto endPtrO = boost::make_transform_iterator(outputs.end(), getTensorInfoPtr);
            std::vector<const TensorInfo*> outputPtrs(beginPtrO, endPtrO);

            result = layerSupportObject->IsStandInSupported(inputPtrs,
                                                            outputPtrs,
                                                            cLayer->GetParameters(),
                                                            reason);
            break;
        }
        case LayerType::StridedSlice:
        {
            auto cLayer = PolymorphicDowncast<const StridedSliceLayer*>(&layer);
            const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
            const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
            result = layerSupportObject->IsStridedSliceSupported(OverrideDataType(input, dataType),
                                                                 OverrideDataType(output, dataType),
                                                                 cLayer->GetParameters(),
                                                                 reason);
            break;
        }
        case LayerType::Subtraction:
        {
            const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
            const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
            const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
            result = layerSupportObject->IsSubtractionSupported(
                OverrideDataType(input0, dataType),
                OverrideDataType(input1, dataType),
                OverrideDataType(output, dataType),
                reason);
            break;
        }
        case LayerType::Switch:
        {
            const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
            const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
            const TensorInfo& output0 = layer.GetOutputSlot(0).GetTensorInfo();
            const TensorInfo& output1 = layer.GetOutputSlot(1).GetTensorInfo();
            result = layerSupportObject->IsSwitchSupported(OverrideDataType(input0, dataType),
                                                           OverrideDataType(input1, dataType),
                                                           OverrideDataType(output0, dataType),
                                                           OverrideDataType(output1, dataType),
                                                           reason);
            break;
        }
        case LayerType::Mean:
        {
            auto cLayer = PolymorphicDowncast<const MeanLayer*>(&layer);
            const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
            const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
            result = layerSupportObject->IsMeanSupported(
                OverrideDataType(input, dataType),
                OverrideDataType(output, dataType),
                cLayer->GetParameters(),
                reason);
            break;
        }
        case LayerType::Minimum:
        {
            const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
            const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
            const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
            result = layerSupportObject->IsMinimumSupported(OverrideDataType(input0, dataType),
                                                            OverrideDataType(input1, dataType),
                                                            OverrideDataType(output, dataType),
                                                            reason);
            break;
        }
        case LayerType::Prelu:
        {
            const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
            const TensorInfo& alpha = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
            const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
            result = layerSupportObject->IsPreluSupported(OverrideDataType(input, dataType),
                                                          OverrideDataType(alpha, dataType),
                                                          OverrideDataType(output, dataType),
                                                          reason);
            break;
        }
        case LayerType::Transpose:
        {
            auto cLayer = PolymorphicDowncast<const TransposeLayer*>(&layer);
            const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
            const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
            result = layerSupportObject->IsTransposeSupported(OverrideDataType(input, dataType),
                                                              OverrideDataType(output, dataType),
                                                              cLayer->GetParameters(),
                                                              reason);
            break;
        }
        case LayerType::TransposeConvolution2d:
        {
            auto cLayer = PolymorphicDowncast<const TransposeConvolution2dLayer*>(&layer);

            const TensorInfo input = OverrideDataType(layer.GetInputSlot(0).GetConnection()->GetTensorInfo(),
                                                      dataType);
            const TensorInfo output = OverrideDataType(layer.GetOutputSlot(0).GetTensorInfo(), dataType);

            const TransposeConvolution2dDescriptor& descriptor = cLayer->GetParameters();

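            // The bias is optional: its TensorInfo is only checked when the descriptor enables it,
            // using the bias data type that corresponds to the (possibly overridden) weights type.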
            Optional<TensorInfo> biases;
            if (descriptor.m_BiasEnabled)
            {
                ARMNN_ASSERT(cLayer->m_Bias.get() != nullptr);
                biases = OverrideDataType(cLayer->m_Bias->GetTensorInfo(),
                                          GetBiasTypeFromWeightsType(dataType));
            }

            ARMNN_ASSERT(cLayer->m_Weight.get() != nullptr);
            const TensorInfo weights = OverrideDataType(cLayer->m_Weight->GetTensorInfo(), dataType);

            result = layerSupportObject->IsTransposeConvolution2dSupported(input,
                                                                           output,
                                                                           descriptor,
                                                                           weights,
                                                                           biases,
                                                                           reason);

            break;
        }
        default:
        {
            ARMNN_ASSERT_MSG(false, "WorkloadFactory did not recognise type of layer.");
            reason.value() = "Unrecognised layer type";
            result = false;
            break;
        }
    }
    return result;
}

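// Convenience overload: resolves the backend assigned to the layer itself and defers to the
// backend-specific IsLayerSupported() check above.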
bool IWorkloadFactory::IsLayerSupported(const IConnectableLayer& connectableLayer,
                                        Optional<DataType> dataType,
                                        std::string& outReasonIfUnsupported)
{
    auto layer = PolymorphicDowncast<const Layer*>(&connectableLayer);
    return IsLayerSupported(layer->GetBackendId(), connectableLayer, dataType, outReasonIfUnsupported);
}

// Default Implementations
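// Each default CreateXxx() below returns an empty std::unique_ptr, i.e. "no workload provided".
// Backend-specific workload factories therefore only need to override the creation functions
// for the workload types their backend actually supports.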
std::unique_ptr<IWorkload> IWorkloadFactory::CreateAbs(const AbsQueueDescriptor& /*descriptor*/,
                                                       const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateActivation(const ActivationQueueDescriptor& /*descriptor*/,
                                                              const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateAddition(const AdditionQueueDescriptor& /*descriptor*/,
                                                            const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateArgMinMax(const ArgMinMaxQueueDescriptor& /*descriptor*/,
                                                             const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateBatchNormalization(
    const BatchNormalizationQueueDescriptor& /*descriptor*/, const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateBatchToSpaceNd(const BatchToSpaceNdQueueDescriptor& /*desc*/,
                                                                  const WorkloadInfo& /*Info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateComparison(const ComparisonQueueDescriptor& /*descriptor*/,
                                                              const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateConcat(const ConcatQueueDescriptor& /*descriptor*/,
                                                          const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateConstant(const ConstantQueueDescriptor& /*descriptor*/,
                                                            const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateConvertBf16ToFp32(const ConvertBf16ToFp32QueueDescriptor& /*desc*/,
                                                                     const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateConvertFp16ToFp32(const ConvertFp16ToFp32QueueDescriptor& /*desc*/,
                                                                     const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateConvertFp32ToBf16(const ConvertFp32ToBf16QueueDescriptor& /*desc*/,
                                                                     const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateConvertFp32ToFp16(const ConvertFp32ToFp16QueueDescriptor& /*desc*/,
                                                                     const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateConvolution2d(const Convolution2dQueueDescriptor& /*descriptor*/,
                                                                 const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateDebug(const DebugQueueDescriptor& /*descriptor*/,
                                                         const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateDepthToSpace(const DepthToSpaceQueueDescriptor& /*descriptor*/,
                                                                const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateDepthwiseConvolution2d(
    const DepthwiseConvolution2dQueueDescriptor& /*descriptor*/, const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateDequantize(
    const DequantizeQueueDescriptor& /*descriptor*/, const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateDetectionPostProcess(
    const DetectionPostProcessQueueDescriptor& /*descriptor*/, const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateDivision(const DivisionQueueDescriptor& /*descriptor*/,
                                                            const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateElementwiseUnary(const ElementwiseUnaryQueueDescriptor& /*desc*/,
                                                                    const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateEqual(const EqualQueueDescriptor& /*descriptor*/,
                                                         const WorkloadInfo& /*Info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateFakeQuantization(const FakeQuantizationQueueDescriptor& /*desc*/,
                                                                    const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateFill(const FillQueueDescriptor& /*descriptor*/,
                                                        const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateFloor(const FloorQueueDescriptor& /*descriptor*/,
                                                         const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateFullyConnected(const FullyConnectedQueueDescriptor& /*descriptor*/,
                                                                  const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateGather(const GatherQueueDescriptor& /*descriptor*/,
                                                          const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateGreater(const GreaterQueueDescriptor& /*descriptor*/,
                                                           const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateInstanceNormalization(
    const InstanceNormalizationQueueDescriptor& /*descriptor*/,
    const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateL2Normalization(const L2NormalizationQueueDescriptor& /*desc*/,
                                                                   const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateLogSoftmax(const LogSoftmaxQueueDescriptor& /*descriptor*/,
                                                              const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateLstm(const LstmQueueDescriptor& /*descriptor*/,
                                                        const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateMaximum(const MaximumQueueDescriptor& /*descriptor*/,
                                                           const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateMean(const MeanQueueDescriptor& /*descriptor*/,
                                                        const WorkloadInfo& /*Info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateMemCopy(const MemCopyQueueDescriptor& /*descriptor*/,
                                                           const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateMemImport(const MemImportQueueDescriptor& /*descriptor*/,
                                                             const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateMerge(const MergeQueueDescriptor& /*descriptor*/,
                                                         const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateMerger(const MergerQueueDescriptor& /*descriptor*/,
                                                          const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateMinimum(const MinimumQueueDescriptor& /*descriptor*/,
                                                           const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateMultiplication(const MultiplicationQueueDescriptor& /*descriptor*/,
                                                                  const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateNormalization(const NormalizationQueueDescriptor& /*descriptor*/,
                                                                 const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateOutput(const OutputQueueDescriptor& /*descriptor*/,
                                                          const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreatePad(const PadQueueDescriptor& /*descriptor*/,
                                                       const WorkloadInfo& /*Info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreatePermute(const PermuteQueueDescriptor& /*descriptor*/,
                                                           const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreatePooling2d(const Pooling2dQueueDescriptor& /*descriptor*/,
                                                             const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreatePreCompiled(const PreCompiledQueueDescriptor& /*descriptor*/,
                                                               const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreatePrelu(const PreluQueueDescriptor& /*descriptor*/,
                                                         const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateQuantize(const QuantizeQueueDescriptor& /*descriptor*/,
                                                            const WorkloadInfo& /*Info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateQLstm(const QLstmQueueDescriptor& /*descriptor*/,
                                                         const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateQuantizedLstm(const QuantizedLstmQueueDescriptor& /*descriptor*/,
                                                                 const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateRank(const RankQueueDescriptor& /*descriptor*/,
                                                        const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateReshape(const ReshapeQueueDescriptor& /*descriptor*/,
                                                           const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateResizeBilinear(const ResizeBilinearQueueDescriptor& /*descriptor*/,
                                                                  const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateResize(const ResizeQueueDescriptor& /*descriptor*/,
                                                          const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateRsqrt(const RsqrtQueueDescriptor& /*descriptor*/,
                                                         const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateSlice(const SliceQueueDescriptor& /*descriptor*/,
                                                         const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateSoftmax(const SoftmaxQueueDescriptor& /*descriptor*/,
                                                           const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateSplitter(const SplitterQueueDescriptor& /*descriptor*/,
                                                            const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateSpaceToBatchNd(const SpaceToBatchNdQueueDescriptor& /*descriptor*/,
                                                                  const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateSpaceToDepth(const SpaceToDepthQueueDescriptor& /*descriptor*/,
                                                                const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateStack(const StackQueueDescriptor& /*descriptor*/,
                                                         const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateStridedSlice(const StridedSliceQueueDescriptor& /*descriptor*/,
                                                                const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateSubtraction(const SubtractionQueueDescriptor& /*descriptor*/,
                                                               const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateSwitch(const SwitchQueueDescriptor& /*descriptor*/,
                                                          const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateTranspose(const TransposeQueueDescriptor& /*descriptor*/,
                                                             const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

std::unique_ptr<IWorkload> IWorkloadFactory::CreateTransposeConvolution2d(
    const TransposeConvolution2dQueueDescriptor& /*descriptor*/,
    const WorkloadInfo& /*info*/) const
{
    return std::unique_ptr<IWorkload>();
}

} // namespace armnn