blob: 090e2856d8f5d1b8de3601e953d572f2c15a0dc5 [file] [log] [blame]
Laurent Carlier749294b2020-06-01 09:03:17 +01001//
Teresa Charlin52664732020-06-29 16:27:03 +01002// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
David Beckecb56cd2018-09-05 12:52:57 +01003// SPDX-License-Identifier: MIT
telsoa014fcda012018-03-09 14:13:49 +00004//
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +00005
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +00006#include <Layer.hpp>
7#include <LayersFwd.hpp>
David Beckdcb751f2018-10-03 11:42:42 +01008
David Beckb4540be2018-09-24 13:18:27 +01009#include <armnn/Types.hpp>
Sadik Armagana097d2a2021-11-24 15:47:28 +000010#include <armnn/backends/IBackendInternal.hpp>
Francis Murtaghcae45682021-04-26 10:07:49 +010011#include <armnn/backends/ILayerSupport.hpp>
Sadik Armaganf0a6dec2021-03-25 07:46:55 +000012#include <armnn/BackendHelper.hpp>
Matteo Martincighc601aa62019-10-29 15:03:22 +000013#include <armnn/BackendRegistry.hpp>
Jan Eilersbb446e52020-04-02 13:56:54 +010014#include <armnn/utility/PolymorphicDowncast.hpp>
Finn Williams3e54d032020-10-22 16:53:35 +010015#include <armnn/utility/TransformIterator.hpp>
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +000016
Colm Donelan0c479742021-12-10 12:43:54 +000017#include <armnn/backends/WorkloadFactory.hpp>
18#include <armnn/backends/TensorHandle.hpp>
telsoa014fcda012018-03-09 14:13:49 +000019
David Beck111b5d92018-11-12 14:59:37 +000020#include <sstream>
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +000021
telsoa014fcda012018-03-09 14:13:49 +000022namespace armnn
23{
24
telsoa01c577f2c2018-08-31 09:22:23 +010025namespace
26{
Finn Williams3e54d032020-10-22 16:53:35 +010027using LayerList = std::list<Layer*>;
28using Iterator = LayerList::const_iterator; // Const so pointers in the list can't be modified externally.
telsoa01c577f2c2018-08-31 09:22:23 +010029
David Beck29c75de2018-10-23 13:35:58 +010030const TensorInfo OverrideDataType(const TensorInfo& info, Optional<DataType> type)
31{
32 if (!type)
33 {
34 return info;
telsoa01c577f2c2018-08-31 09:22:23 +010035 }
36
Matthew Sloyan81beae32021-07-13 19:46:11 +010037 return TensorInfo(info.GetShape(),
38 type.value(),
39 info.GetQuantizationScale(),
40 info.GetQuantizationOffset(),
41 info.IsConstant());
telsoa01c577f2c2018-08-31 09:22:23 +010042}
43
David Beck29c75de2018-10-23 13:35:58 +010044} // anonymous namespace
45
Sadik Armagana097d2a2021-11-24 15:47:28 +000046inline armnn::Optional<armnn::DataType> GetBiasTypeFromWeightsType(armnn::Optional<armnn::DataType> weightsType)
47{
48 if (!weightsType)
49 {
50 return weightsType;
51 }
52
53 switch(weightsType.value())
54 {
55 case armnn::DataType::BFloat16:
56 case armnn::DataType::Float16:
57 case armnn::DataType::Float32:
58 return weightsType;
59 case armnn::DataType::QAsymmS8:
60 case armnn::DataType::QAsymmU8:
61 case armnn::DataType::QSymmS8:
62 case armnn::DataType::QSymmS16:
63 return armnn::DataType::Signed32;
64 default:
65 ARMNN_ASSERT_MSG(false, "GetBiasTypeFromWeightsType(): Unsupported data type.");
66 }
67 return armnn::EmptyOptional();
68}
69
70
Sadik Armagan045f6be2020-09-10 13:37:32 +010071bool IWorkloadFactory::IsLayerConfigurationSupported(const BackendId& backendId,
72 const IConnectableLayer& connectableLayer,
73 Optional<DataType> dataType,
74 std::string& outReasonIfUnsupported,
75 const ModelOptions& modelOptions)
telsoa014fcda012018-03-09 14:13:49 +000076{
David Beck33f0ae02018-10-18 15:13:56 +010077 Optional<std::string&> reason = outReasonIfUnsupported;
telsoa014fcda012018-03-09 14:13:49 +000078 bool result;
Jan Eilersbb446e52020-04-02 13:56:54 +010079 const Layer& layer = *(PolymorphicDowncast<const Layer*>(&connectableLayer));
David Beckdcb751f2018-10-03 11:42:42 +010080
David Beck111b5d92018-11-12 14:59:37 +000081 auto const& backendRegistry = BackendRegistryInstance();
82 if (!backendRegistry.IsBackendRegistered(backendId))
83 {
84 std::stringstream ss;
85 ss << connectableLayer.GetName() << " is not supported on " << backendId
86 << " because this backend is not registered.";
87
88 outReasonIfUnsupported = ss.str();
89 return false;
90 }
91
92 auto backendFactory = backendRegistry.GetFactory(backendId);
93 auto backendObject = backendFactory();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +000094 auto layerSupportObject = LayerSupportHandle(backendObject->GetLayerSupport(modelOptions), backendId);
David Beck33f0ae02018-10-18 15:13:56 +010095
telsoa014fcda012018-03-09 14:13:49 +000096 switch(layer.GetType())
97 {
98 case LayerType::Activation:
99 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100100 auto cLayer = PolymorphicDowncast<const ActivationLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000101 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100102 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000103 result = layerSupportObject.IsActivationSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100104 OverrideDataType(input, dataType),
105 OverrideDataType(output, dataType),
106 cLayer->GetParameters(),
David Beck33f0ae02018-10-18 15:13:56 +0100107 reason);
telsoa014fcda012018-03-09 14:13:49 +0000108 break;
109 }
110 case LayerType::Addition:
111 {
112 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
113 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
114 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000115 result = layerSupportObject.IsAdditionSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100116 OverrideDataType(input0, dataType),
117 OverrideDataType(input1, dataType),
118 OverrideDataType(output, dataType),
David Beck33f0ae02018-10-18 15:13:56 +0100119 reason);
telsoa014fcda012018-03-09 14:13:49 +0000120 break;
121 }
Nikhil Rajee391d52019-09-05 17:50:44 +0100122 case LayerType::ArgMinMax:
123 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100124 auto cLayer = PolymorphicDowncast<const ArgMinMaxLayer*>(&layer);
Nikhil Rajee391d52019-09-05 17:50:44 +0100125 const ArgMinMaxDescriptor& descriptor = cLayer->GetParameters();
126
127 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
128 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000129 result = layerSupportObject.IsArgMinMaxSupported(
Nikhil Rajee391d52019-09-05 17:50:44 +0100130 OverrideDataType(input, dataType),
Narumol Prangnawaratd1f57732019-10-31 14:24:02 +0000131 OverrideDataType(output, DataType::Signed32),
Nikhil Rajee391d52019-09-05 17:50:44 +0100132 descriptor,
133 reason);
134 break;
135 }
telsoa014fcda012018-03-09 14:13:49 +0000136 case LayerType::BatchNormalization:
137 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100138 auto cLayer = PolymorphicDowncast<const BatchNormalizationLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000139 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100140 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
141 const TensorInfo& mean = cLayer->m_Mean->GetTensorInfo();
142 const TensorInfo& var = cLayer->m_Variance->GetTensorInfo();
143 const TensorInfo& beta = cLayer->m_Beta->GetTensorInfo();
144 const TensorInfo& gamma = cLayer->m_Gamma->GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000145 result = layerSupportObject.IsBatchNormalizationSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100146 OverrideDataType(input, dataType),
147 OverrideDataType(output, dataType),
148 OverrideDataType(mean, dataType),
149 OverrideDataType(var, dataType),
150 OverrideDataType(beta, dataType),
151 OverrideDataType(gamma, dataType),
152 cLayer->GetParameters(),
David Beck33f0ae02018-10-18 15:13:56 +0100153 reason);
telsoa014fcda012018-03-09 14:13:49 +0000154 break;
155 }
Éanna Ó Catháin4e1e1362018-11-12 11:36:34 +0000156 case LayerType::BatchToSpaceNd:
157 {
158 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
159 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Jan Eilersbb446e52020-04-02 13:56:54 +0100160 auto cLayer = PolymorphicDowncast<const BatchToSpaceNdLayer*>(&layer);
Éanna Ó Catháin4e1e1362018-11-12 11:36:34 +0000161
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000162 result = layerSupportObject.IsBatchToSpaceNdSupported(OverrideDataType(input, dataType),
163 OverrideDataType(output, dataType),
164 cLayer->GetParameters(),
165 reason);
Éanna Ó Catháin4e1e1362018-11-12 11:36:34 +0000166 break;
167 }
mathad01b392e982021-04-07 12:07:30 +0100168 case LayerType::Cast:
169 {
170 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
171 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
172
173 result = layerSupportObject.IsCastSupported(OverrideDataType(input, dataType),
174 OverrideDataType(output, dataType),
175 reason);
176 break;
177 }
Simon Obute51f67772021-09-03 15:50:13 +0100178 case LayerType::ChannelShuffle:
179 {
180 auto cLayer = PolymorphicDowncast<const ChannelShuffleLayer*>(&layer);
181
182 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
183 const TensorInfo& output = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
184
185 const ChannelShuffleDescriptor descriptor = cLayer->GetParameters();
186
187 result = layerSupportObject.IsChannelShuffleSupported(OverrideDataType(input, dataType),
188 OverrideDataType(output, dataType),
189 descriptor,
190 reason);
191 break;
192 }
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +0100193 case LayerType::Comparison:
194 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100195 auto cLayer = PolymorphicDowncast<const ComparisonLayer*>(&layer);
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +0100196
197 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
198 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
199 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
200
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000201 result = layerSupportObject.IsComparisonSupported(OverrideDataType(input0, dataType),
202 OverrideDataType(input1, dataType),
203 OverrideDataType(output, DataType::Boolean),
204 cLayer->GetParameters(),
205 reason);
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +0100206 break;
207 }
telsoa014fcda012018-03-09 14:13:49 +0000208 case LayerType::Constant:
209 {
210 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000211 result = layerSupportObject.IsConstantSupported(OverrideDataType(output, dataType), reason);
telsoa01c577f2c2018-08-31 09:22:23 +0100212 break;
213 }
Narumol Prangnawarat7ddbbae2020-03-13 10:26:05 +0000214 case LayerType::ConvertBf16ToFp32:
215 {
216 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
217 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000218 result = layerSupportObject.IsConvertBf16ToFp32Supported(input, output, reason);
Narumol Prangnawarat7ddbbae2020-03-13 10:26:05 +0000219 break;
220 }
telsoa01c577f2c2018-08-31 09:22:23 +0100221 case LayerType::ConvertFp16ToFp32:
222 {
223 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
224 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000225 result = layerSupportObject.IsConvertFp16ToFp32Supported(input, output, reason);
telsoa01c577f2c2018-08-31 09:22:23 +0100226 break;
227 }
Narumol Prangnawaratea54a012020-03-16 16:36:10 +0000228 case LayerType::ConvertFp32ToBf16:
229 {
230 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
231 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000232 result = layerSupportObject.IsConvertFp32ToBf16Supported(input, output, reason);
Narumol Prangnawaratea54a012020-03-16 16:36:10 +0000233 break;
234 }
telsoa01c577f2c2018-08-31 09:22:23 +0100235 case LayerType::ConvertFp32ToFp16:
236 {
237 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
238 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000239 result = layerSupportObject.IsConvertFp32ToFp16Supported(input, output, reason);
telsoa014fcda012018-03-09 14:13:49 +0000240 break;
241 }
242 case LayerType::Convolution2d:
243 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100244 auto cLayer = PolymorphicDowncast<const Convolution2dLayer*>(&layer);
arovir01a6824102018-08-28 17:40:45 +0100245
246 const TensorInfo input = OverrideDataType(layer.GetInputSlot(0).GetConnection()->GetTensorInfo(),
247 dataType);
telsoa01c577f2c2018-08-31 09:22:23 +0100248 const TensorInfo output = OverrideDataType(layer.GetOutputSlot(0).GetTensorInfo(), dataType);
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +0100249 ARMNN_ASSERT(cLayer->m_Weight.get() != nullptr);
surmeh013537c2c2018-05-18 16:31:43 +0100250
Matthew Sloyanb63a3112021-09-08 13:05:51 +0100251 const Convolution2dDescriptor& descriptor = cLayer->GetParameters();
surmeh013537c2c2018-05-18 16:31:43 +0100252
arovir01a6824102018-08-28 17:40:45 +0100253 // Construct optional biases object based on the value of m_BiasEnabled
David Beck5eec11d2018-10-04 15:43:17 +0100254 Optional<TensorInfo> biases;
surmeh013537c2c2018-05-18 16:31:43 +0100255 if (descriptor.m_BiasEnabled)
256 {
Matthew Sloyanb63a3112021-09-08 13:05:51 +0100257 biases = OverrideDataType(cLayer->m_Bias->GetTensorInfo(), GetBiasTypeFromWeightsType(dataType));
surmeh013537c2c2018-05-18 16:31:43 +0100258 }
259
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000260 result = layerSupportObject.IsConvolution2dSupported(
surmeh013537c2c2018-05-18 16:31:43 +0100261 input,
262 output,
263 descriptor,
telsoa01c577f2c2018-08-31 09:22:23 +0100264 OverrideDataType(cLayer->m_Weight->GetTensorInfo(), dataType),
arovir01a6824102018-08-28 17:40:45 +0100265 biases,
David Beck33f0ae02018-10-18 15:13:56 +0100266 reason);
telsoa014fcda012018-03-09 14:13:49 +0000267 break;
268 }
Matthew Sloyanb63a3112021-09-08 13:05:51 +0100269 case LayerType::Convolution3d:
270 {
271 auto cLayer = PolymorphicDowncast<const Convolution3dLayer*>(&layer);
272
273 const TensorInfo input = OverrideDataType(layer.GetInputSlot(0).GetConnection()->GetTensorInfo(),
274 dataType);
275 const TensorInfo output = OverrideDataType(layer.GetOutputSlot(0).GetTensorInfo(), dataType);
Matthew Sloyan5d7b0a32021-10-18 13:07:49 +0100276
277 ARMNN_ASSERT_MSG(layer.GetInputSlot(1).GetConnection(),
278 "Convolution3dLayer: Weights should be connected as a Constant Layer.");
279 const TensorInfo weights = OverrideDataType(layer.GetInputSlot(1).GetConnection()->GetTensorInfo(),
280 dataType);
Matthew Sloyanb63a3112021-09-08 13:05:51 +0100281
282 const Convolution3dDescriptor& descriptor = cLayer->GetParameters();
283
284 // Construct optional biases object based on the value of m_BiasEnabled
285 Optional<TensorInfo> biases;
286 if (descriptor.m_BiasEnabled)
287 {
Matthew Sloyan5d7b0a32021-10-18 13:07:49 +0100288 biases = OverrideDataType(layer.GetInputSlot(2).GetConnection()->GetTensorInfo(),
289 GetBiasTypeFromWeightsType(dataType));
Matthew Sloyanb63a3112021-09-08 13:05:51 +0100290 }
291
292 result = layerSupportObject.IsConvolution3dSupported(
293 input,
294 output,
295 descriptor,
Matthew Sloyan5d7b0a32021-10-18 13:07:49 +0100296 weights,
Matthew Sloyanb63a3112021-09-08 13:05:51 +0100297 biases,
298 reason);
299 break;
300 }
Nattapat Chaimanowonga9a1cf12018-12-03 16:06:49 +0000301 case LayerType::Debug:
302 {
303 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
304 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
305
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000306 result = layerSupportObject.IsDebugSupported(OverrideDataType(input, dataType),
Nattapat Chaimanowonga9a1cf12018-12-03 16:06:49 +0000307 OverrideDataType(output, dataType),
308 reason);
309 break;
310 }
Aron Virginas-Tardd6247f2019-09-19 14:31:17 +0100311 case LayerType::DepthToSpace:
312 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100313 auto cLayer = PolymorphicDowncast<const DepthToSpaceLayer*>(&layer);
Aron Virginas-Tardd6247f2019-09-19 14:31:17 +0100314
315 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
316 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
317
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000318 result = layerSupportObject.IsDepthToSpaceSupported(OverrideDataType(input, dataType),
Aron Virginas-Tardd6247f2019-09-19 14:31:17 +0100319 OverrideDataType(output, dataType),
320 cLayer->GetParameters(),
321 reason);
322 break;
323 }
telsoa014fcda012018-03-09 14:13:49 +0000324 case LayerType::DepthwiseConvolution2d:
325 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100326 auto cLayer = PolymorphicDowncast<const DepthwiseConvolution2dLayer*>(&layer);
telsoa01c577f2c2018-08-31 09:22:23 +0100327 const TensorInfo& input = OverrideDataType(layer.GetInputSlot(0).GetConnection()->GetTensorInfo(),
328 dataType);
329 const TensorInfo& output = OverrideDataType(layer.GetOutputSlot(0).GetTensorInfo(), dataType);
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +0100330 ARMNN_ASSERT(cLayer->m_Weight.get() != nullptr);
telsoa01c577f2c2018-08-31 09:22:23 +0100331
telsoa01c577f2c2018-08-31 09:22:23 +0100332 const DepthwiseConvolution2dDescriptor& descriptor = cLayer->GetParameters();
arovir01a6824102018-08-28 17:40:45 +0100333
334 // Construct optional biases object based on the value of m_BiasEnabled
David Beck5eec11d2018-10-04 15:43:17 +0100335 Optional<TensorInfo> biases;
telsoa01c577f2c2018-08-31 09:22:23 +0100336 if (descriptor.m_BiasEnabled)
337 {
David Beck5eec11d2018-10-04 15:43:17 +0100338 biases =
339 OverrideDataType(cLayer->m_Bias->GetTensorInfo(), GetBiasTypeFromWeightsType(dataType));
telsoa01c577f2c2018-08-31 09:22:23 +0100340 }
telsoa01c577f2c2018-08-31 09:22:23 +0100341
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000342 result = layerSupportObject.IsDepthwiseConvolutionSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100343 input,
344 output,
345 descriptor,
346 OverrideDataType(cLayer->m_Weight->GetTensorInfo(), dataType),
arovir01a6824102018-08-28 17:40:45 +0100347 biases,
David Beck33f0ae02018-10-18 15:13:56 +0100348 reason);
telsoa014fcda012018-03-09 14:13:49 +0000349 break;
350 }
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +0000351 case LayerType::Dequantize:
352 {
353 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
354 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
355
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000356 result = layerSupportObject.IsDequantizeSupported(input,
357 OverrideDataType(output, dataType),
358 reason);
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +0000359 break;
360 }
Narumol Prangnawarat94dd5d82019-01-23 18:06:26 +0000361 case LayerType::DetectionPostProcess:
362 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100363 auto cLayer = PolymorphicDowncast<const DetectionPostProcessLayer*>(&layer);
Derek Lamberti6a5e5e82019-12-05 14:41:20 +0000364 const TensorInfo& boxEncodings = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
365 const TensorInfo& scores = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
366 const TensorInfo& anchors = cLayer->m_Anchors->GetTensorInfo();
367
368 const TensorInfo& detectionBoxes = layer.GetOutputSlot(0).GetTensorInfo();
369 const TensorInfo& detectionClasses = layer.GetOutputSlot(1).GetTensorInfo();
370 const TensorInfo& detectionScores = layer.GetOutputSlot(2).GetTensorInfo();
371 const TensorInfo& numDetections = layer.GetOutputSlot(3).GetTensorInfo();
372
Narumol Prangnawarat94dd5d82019-01-23 18:06:26 +0000373 const DetectionPostProcessDescriptor& descriptor = cLayer->GetParameters();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000374 result = layerSupportObject.IsDetectionPostProcessSupported(boxEncodings,
375 scores,
376 anchors,
377 detectionBoxes,
378 detectionClasses,
379 detectionScores,
380 numDetections,
381 descriptor,
382 reason);
Narumol Prangnawarat94dd5d82019-01-23 18:06:26 +0000383 break;
384 }
josh minor4a3c6102020-01-06 16:40:46 -0600385 case LayerType::ElementwiseUnary:
386 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100387 auto cLayer = PolymorphicDowncast<const ElementwiseUnaryLayer*>(&layer);
josh minor4a3c6102020-01-06 16:40:46 -0600388
389 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
390 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
391
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000392 result = layerSupportObject.IsElementwiseUnarySupported(OverrideDataType(input, dataType),
393 OverrideDataType(output, dataType),
394 cLayer->GetParameters(),
395 reason);
josh minor4a3c6102020-01-06 16:40:46 -0600396 break;
397 }
Ryan OSheaec6c6802020-06-05 17:17:06 +0100398 case LayerType::Fill:
399 {
400 auto cLayer = PolymorphicDowncast<const FillLayer*>(&layer);
401 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
402 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
403 const FillDescriptor& descriptor = cLayer->GetParameters();
404
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000405 result = layerSupportObject.IsFillSupported(
Ryan OSheaec6c6802020-06-05 17:17:06 +0100406 OverrideDataType(input, dataType),
407 OverrideDataType(output, dataType),
408 descriptor,
409 reason);
410 break;
411 }
telsoa014fcda012018-03-09 14:13:49 +0000412 case LayerType::FakeQuantization:
413 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100414 auto cLayer = PolymorphicDowncast<const FakeQuantizationLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000415 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000416 result = layerSupportObject.IsFakeQuantizationSupported(OverrideDataType(input, dataType),
417 cLayer->GetParameters(),
418 reason);
telsoa014fcda012018-03-09 14:13:49 +0000419 break;
420 }
421 case LayerType::Floor:
422 {
423 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
424 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000425 result = layerSupportObject.IsFloorSupported(OverrideDataType(input, dataType),
426 OverrideDataType(output, dataType),
427 reason);
telsoa014fcda012018-03-09 14:13:49 +0000428 break;
429 }
430 case LayerType::FullyConnected:
431 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100432 auto cLayer = PolymorphicDowncast<const FullyConnectedLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000433 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100434 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000435
436 const FullyConnectedDescriptor& descriptor = cLayer->GetParameters();
437 TensorInfo weightsInfo;
438 const TensorInfo* weightsInfoPtr = nullptr;
439
Matthew Sloyan81beae32021-07-13 19:46:11 +0100440 weightsInfo = OverrideDataType(layer.GetInputSlot(1).GetConnection()->GetTensorInfo(), dataType);
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000441 weightsInfoPtr = &weightsInfo;
telsoa01c577f2c2018-08-31 09:22:23 +0100442
443 TensorInfo biasInfo;
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000444 const TensorInfo* biasInfoPtr = nullptr;
Narumol Prangnawarat44179c32020-03-11 14:51:27 +0000445 static const TensorInfo dummyBFloat16Bias(TensorShape({1,1,1,1}), DataType::BFloat16);
telsoa01c577f2c2018-08-31 09:22:23 +0100446 static const TensorInfo dummyFloat16Bias(TensorShape({1,1,1,1}), DataType::Float16);
447 static const TensorInfo dummyFloat32Bias(TensorShape({1,1,1,1}), DataType::Float32);
448 static const TensorInfo dummyQA8Bias(TensorShape({1,1,1,1}), DataType::Signed32);
449
telsoa01c577f2c2018-08-31 09:22:23 +0100450 if (descriptor.m_BiasEnabled)
451 {
Matthew Sloyan81beae32021-07-13 19:46:11 +0100452 biasInfo = OverrideDataType(layer.GetInputSlot(2).GetConnection()->GetTensorInfo(), dataType);
453 biasInfoPtr = &biasInfo;
telsoa01c577f2c2018-08-31 09:22:23 +0100454 }
455 else
456 {
457 // If biases are not enabled pass a dummy tensorinfo for the validation
458 switch(input.GetDataType())
459 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +0000460 case DataType::BFloat16:
461 {
462 biasInfoPtr = &dummyBFloat16Bias;
463 break;
464 }
telsoa01c577f2c2018-08-31 09:22:23 +0100465 case DataType::Float16:
466 {
467 biasInfoPtr = &dummyFloat16Bias;
468 break;
469 }
470 case DataType::Float32:
471 {
472 biasInfoPtr = &dummyFloat32Bias;
473 break;
474 }
Derek Lambertif90c56d2020-01-10 17:14:08 +0000475 case DataType::QAsymmU8:
Keith Davisa8565012020-02-14 12:22:40 +0000476 case DataType::QAsymmS8:
Keith Davis9d0ff742020-02-03 14:47:54 +0000477 case DataType::QSymmS8:
Derek Lambertif90c56d2020-01-10 17:14:08 +0000478 case DataType::QSymmS16:
telsoa01c577f2c2018-08-31 09:22:23 +0100479 {
480 biasInfoPtr = &dummyQA8Bias;
481 break;
482 }
483 default:
484 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +0100485 ARMNN_ASSERT_MSG(false, "Unexpected bias type");
telsoa01c577f2c2018-08-31 09:22:23 +0100486 }
487 }
488 }
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000489 result = layerSupportObject.IsFullyConnectedSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100490 OverrideDataType(input, dataType),
491 OverrideDataType(output, dataType),
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000492 *weightsInfoPtr,
telsoa01c577f2c2018-08-31 09:22:23 +0100493 *biasInfoPtr,
494 descriptor,
David Beck33f0ae02018-10-18 15:13:56 +0100495 reason);
telsoa014fcda012018-03-09 14:13:49 +0000496 break;
497 }
narpra01b89b05f2019-01-16 09:53:09 +0000498 case LayerType::Gather:
499 {
500 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
501 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
502 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Teresa Charlin52664732020-06-29 16:27:03 +0100503 auto cLayer = PolymorphicDowncast<const GatherLayer*>(&layer);
504 const GatherDescriptor& descriptor = cLayer->GetParameters();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000505 result = layerSupportObject.IsGatherSupported(OverrideDataType(input0, dataType),
506 input1,
507 OverrideDataType(output, dataType),
508 descriptor,
509 reason);
narpra01b89b05f2019-01-16 09:53:09 +0000510 break;
511 }
telsoa014fcda012018-03-09 14:13:49 +0000512 case LayerType::Input:
513 {
514 const TensorInfo& input = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000515 result = layerSupportObject.IsInputSupported(OverrideDataType(input, dataType), reason);
telsoa014fcda012018-03-09 14:13:49 +0000516 break;
517 }
Kevin Mayce5045a2019-10-02 14:07:47 +0100518 case LayerType::InstanceNormalization:
519 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100520 auto cLayer = PolymorphicDowncast<const InstanceNormalizationLayer*>(&layer);
Kevin Mayce5045a2019-10-02 14:07:47 +0100521 const InstanceNormalizationDescriptor& descriptor = cLayer->GetParameters();
522
523 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
524 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
525
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000526 result = layerSupportObject.IsInstanceNormalizationSupported(
Kevin Mayce5045a2019-10-02 14:07:47 +0100527 OverrideDataType(input, dataType),
528 OverrideDataType(output, dataType),
529 descriptor,
530 reason);
531 break;
532 }
telsoa014fcda012018-03-09 14:13:49 +0000533 case LayerType::L2Normalization:
534 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100535 auto cLayer = PolymorphicDowncast<const L2NormalizationLayer*>(&layer);
Matteo Martincighbcd3c852018-09-28 14:14:12 +0100536 const L2NormalizationDescriptor& descriptor = cLayer->GetParameters();
537
telsoa014fcda012018-03-09 14:13:49 +0000538 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100539 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Matteo Martincighbcd3c852018-09-28 14:14:12 +0100540
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000541 result = layerSupportObject.IsL2NormalizationSupported(
Matteo Martincighbcd3c852018-09-28 14:14:12 +0100542 OverrideDataType(input, dataType),
543 OverrideDataType(output, dataType),
544 descriptor,
David Beck33f0ae02018-10-18 15:13:56 +0100545 reason);
telsoa01c577f2c2018-08-31 09:22:23 +0100546 break;
547 }
James Conroyaba90cd2020-11-06 16:28:18 +0000548 case LayerType::LogicalBinary:
549 {
550 auto cLayer = PolymorphicDowncast<const LogicalBinaryLayer*>(&layer);
551
552 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
553 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
554 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
555
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000556 result = layerSupportObject.IsLogicalBinarySupported(input0,
557 input1,
558 output,
559 cLayer->GetParameters(),
560 reason);
James Conroyaba90cd2020-11-06 16:28:18 +0000561 break;
562 }
Aron Virginas-Tarf982dea2019-10-11 14:07:53 +0100563 case LayerType::LogSoftmax:
564 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100565 auto cLayer = PolymorphicDowncast<const LogSoftmaxLayer*>(&layer);
Aron Virginas-Tarf982dea2019-10-11 14:07:53 +0100566
567 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
568 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
569
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000570 result = layerSupportObject.IsLogSoftmaxSupported(OverrideDataType(input, dataType),
571 OverrideDataType(output, dataType),
572 cLayer->GetParameters(),
573 reason);
Aron Virginas-Tarf982dea2019-10-11 14:07:53 +0100574 break;
575 }
telsoa01c577f2c2018-08-31 09:22:23 +0100576 case LayerType::Lstm:
577 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100578 auto cLayer = PolymorphicDowncast<const LstmLayer*>(&layer);
telsoa01c577f2c2018-08-31 09:22:23 +0100579 const LstmDescriptor& descriptor = cLayer->GetParameters();
580
581 // All inputs.
582 const TensorInfo& input = OverrideDataType(layer.GetInputSlot(0).GetConnection()->GetTensorInfo(),
583 dataType);
584 const TensorInfo& outputStateIn = OverrideDataType(layer.GetInputSlot(1).GetConnection()->GetTensorInfo(),
585 dataType);
586 const TensorInfo& cellStateIn = OverrideDataType(layer.GetInputSlot(2).GetConnection()->GetTensorInfo(),
587 dataType);
588 // All outputs
589 const TensorInfo& scratchBuffer = OverrideDataType(layer.GetOutputSlot(0).GetTensorInfo(), dataType);
590 const TensorInfo& outputStateOut = OverrideDataType(layer.GetOutputSlot(1).GetTensorInfo(), dataType);
591 const TensorInfo& cellStateOut = OverrideDataType(layer.GetOutputSlot(2).GetTensorInfo(), dataType);
592 const TensorInfo& output = OverrideDataType(layer.GetOutputSlot(3).GetTensorInfo(), dataType);
593
594 // Basic parameters
595 const TensorInfo& inputToForgetWeights
596 = OverrideDataType(cLayer->m_BasicParameters.m_InputToForgetWeights->GetTensorInfo(), dataType);
597 const TensorInfo& inputToCellWeights
598 = OverrideDataType(cLayer->m_BasicParameters.m_InputToCellWeights->GetTensorInfo(), dataType);
599 const TensorInfo& inputToOutputWeights
600 = OverrideDataType(cLayer->m_BasicParameters.m_InputToOutputWeights->GetTensorInfo(), dataType);
601 const TensorInfo& recurrentToForgetWeights
602 = OverrideDataType(cLayer->m_BasicParameters.m_RecurrentToForgetWeights->GetTensorInfo(), dataType);
603 const TensorInfo& recurrentToCellWeights
604 = OverrideDataType(cLayer->m_BasicParameters.m_RecurrentToCellWeights->GetTensorInfo(), dataType);
605 const TensorInfo& recurrentToOutputWeights
606 = OverrideDataType(cLayer->m_BasicParameters.m_RecurrentToOutputWeights->GetTensorInfo(), dataType);
607 const TensorInfo& forgetGateBias
608 = OverrideDataType(cLayer->m_BasicParameters.m_ForgetGateBias->GetTensorInfo(), dataType);
609 const TensorInfo& cellBias
610 = OverrideDataType(cLayer->m_BasicParameters.m_CellBias->GetTensorInfo(), dataType);
611 const TensorInfo& outputGateBias
612 = OverrideDataType(cLayer->m_BasicParameters.m_OutputGateBias->GetTensorInfo(), dataType);
613
Jan Eilersd01a83c2019-07-03 18:20:40 +0100614 LstmInputParamsInfo paramsInfo;
telsoa01c577f2c2018-08-31 09:22:23 +0100615
Jan Eilersd01a83c2019-07-03 18:20:40 +0100616 paramsInfo.m_InputToForgetWeights = &inputToForgetWeights;
617 paramsInfo.m_InputToCellWeights = &inputToCellWeights;
618 paramsInfo.m_InputToOutputWeights = &inputToOutputWeights;
619 paramsInfo.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
620 paramsInfo.m_RecurrentToCellWeights = &recurrentToCellWeights;
621 paramsInfo.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
622 paramsInfo.m_ForgetGateBias = &forgetGateBias;
623 paramsInfo.m_CellBias = &cellBias;
624 paramsInfo.m_OutputGateBias = &outputGateBias;
625
626
627 // Optional parameters
telsoa01c577f2c2018-08-31 09:22:23 +0100628 TensorInfo optInputToInputWeights;
629 TensorInfo optRecurrentToInputWeights;
630 TensorInfo optCellToInputWeights;
631 TensorInfo optInputGateBias;
632 TensorInfo optProjectionWeights;
633 TensorInfo optProjectionBias;
634 TensorInfo optCellToForgetWeights;
635 TensorInfo optCellToOutputWeights;
Jan Eilers38e05bd2019-06-26 13:10:09 +0100636 TensorInfo optInputLayerNormWeights;
637 TensorInfo optForgetLayerNormWeights;
638 TensorInfo optCellLayerNormWeights;
639 TensorInfo optOutputLayerNormWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100640
641 if(!descriptor.m_CifgEnabled)
642 {
643 optInputToInputWeights =
644 OverrideDataType(cLayer->m_CifgParameters.m_InputToInputWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100645 paramsInfo.m_InputToInputWeights = &optInputToInputWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100646
647 optRecurrentToInputWeights =
648 OverrideDataType(cLayer->m_CifgParameters.m_RecurrentToInputWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100649 paramsInfo.m_RecurrentToInputWeights = &optRecurrentToInputWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100650 optInputGateBias =
651 OverrideDataType(cLayer->m_CifgParameters.m_InputGateBias->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100652 paramsInfo.m_InputGateBias = &optInputGateBias;
telsoa01c577f2c2018-08-31 09:22:23 +0100653 }
654
655 if(descriptor.m_ProjectionEnabled)
656 {
657 optProjectionWeights =
658 OverrideDataType(cLayer->m_ProjectionParameters.m_ProjectionWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100659 paramsInfo.m_ProjectionWeights = &optProjectionWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100660 if (cLayer->m_ProjectionParameters.m_ProjectionBias != nullptr)
661 {
662 optProjectionBias =
663 OverrideDataType(cLayer->m_ProjectionParameters.m_ProjectionBias->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100664 paramsInfo.m_ProjectionBias = &optProjectionBias;
telsoa01c577f2c2018-08-31 09:22:23 +0100665 }
666 }
667
668 if(descriptor.m_PeepholeEnabled)
669 {
Jan Eilerse2062cd2020-03-30 15:07:45 +0100670 if(!descriptor.m_CifgEnabled)
671 {
672 optCellToInputWeights =
673 OverrideDataType(cLayer->m_PeepholeParameters.m_CellToInputWeights->GetTensorInfo(),
674 dataType);
675 paramsInfo.m_CellToInputWeights = &optCellToInputWeights;
676 }
telsoa01c577f2c2018-08-31 09:22:23 +0100677 optCellToForgetWeights =
678 OverrideDataType(cLayer->m_PeepholeParameters.m_CellToForgetWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100679 paramsInfo.m_CellToForgetWeights = &optCellToForgetWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100680 optCellToOutputWeights =
681 OverrideDataType(cLayer->m_PeepholeParameters.m_CellToOutputWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100682 paramsInfo.m_CellToOutputWeights = &optCellToOutputWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100683 }
684
Jan Eilers38e05bd2019-06-26 13:10:09 +0100685 if(descriptor.m_LayerNormEnabled)
686 {
Ferran Balaguere30c16e2019-07-24 17:03:45 +0100687 if (!descriptor.m_CifgEnabled)
688 {
689 optInputLayerNormWeights = OverrideDataType(
690 cLayer->m_LayerNormParameters.m_InputLayerNormWeights->GetTensorInfo(), dataType);
691 paramsInfo.m_InputLayerNormWeights = &optInputLayerNormWeights;
692 }
Jan Eilers38e05bd2019-06-26 13:10:09 +0100693
694 optForgetLayerNormWeights = OverrideDataType(
695 cLayer->m_LayerNormParameters.m_ForgetLayerNormWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100696 paramsInfo.m_ForgetLayerNormWeights = &optForgetLayerNormWeights;
Jan Eilers38e05bd2019-06-26 13:10:09 +0100697
698 optCellLayerNormWeights = OverrideDataType(
699 cLayer->m_LayerNormParameters.m_CellLayerNormWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100700 paramsInfo.m_CellLayerNormWeights = &optCellLayerNormWeights;
Jan Eilers38e05bd2019-06-26 13:10:09 +0100701
702 optOutputLayerNormWeights = OverrideDataType(
703 cLayer->m_LayerNormParameters.m_OutputLayerNormWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100704 paramsInfo.m_OutputLayerNormWeights = &optOutputLayerNormWeights;
Jan Eilers38e05bd2019-06-26 13:10:09 +0100705 }
706
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000707 result = layerSupportObject.IsLstmSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100708 input,
709 outputStateIn,
710 cellStateIn,
711 scratchBuffer,
712 outputStateOut,
713 cellStateOut,
714 output,
715 descriptor,
Jan Eilersd01a83c2019-07-03 18:20:40 +0100716 paramsInfo,
717 reason);
telsoa014fcda012018-03-09 14:13:49 +0000718 break;
719 }
Nattapat Chaimanowong5a4304a2018-11-28 10:44:37 +0000720 case LayerType::Maximum:
721 {
722 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
723 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
724 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
725
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000726 result = layerSupportObject.IsMaximumSupported(OverrideDataType(input0, dataType),
727 OverrideDataType(input1, dataType),
728 OverrideDataType(output, dataType),
729 reason);
Nattapat Chaimanowong5a4304a2018-11-28 10:44:37 +0000730 break;
731 }
narpra01b89b05f2019-01-16 09:53:09 +0000732 case LayerType::MemCopy:
733 {
734 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
735 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Matteo Martincigh992d6dc2019-01-10 17:34:20 +0000736
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000737 result = layerSupportObject.IsMemCopySupported(OverrideDataType(input, dataType),
738 OverrideDataType(output, dataType),
739 reason);
narpra01b89b05f2019-01-16 09:53:09 +0000740 break;
741 }
Derek Lambertif674aa02019-08-01 15:56:25 +0100742 case LayerType::MemImport:
743 {
744 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
745 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
746
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000747 result = layerSupportObject.IsMemImportSupported(OverrideDataType(input, dataType),
748 OverrideDataType(output, dataType),
749 reason);
Derek Lambertif674aa02019-08-01 15:56:25 +0100750 break;
751 }
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +0100752 case LayerType::Merge:
753 {
754 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
755 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
756 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
757
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000758 result = layerSupportObject.IsMergeSupported(OverrideDataType(input0, dataType),
759 OverrideDataType(input1, dataType),
760 OverrideDataType(output, dataType),
761 reason);
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +0100762 break;
763 }
Jim Flynne242f2d2019-05-22 14:24:13 +0100764 case LayerType::Concat:
telsoa014fcda012018-03-09 14:13:49 +0000765 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100766 auto cLayer = PolymorphicDowncast<const ConcatLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000767
telsoa01c577f2c2018-08-31 09:22:23 +0100768 // Get vector of all inputs.
769 auto getTensorInfo = [&dataType](const InputSlot& slot)
telsoa014fcda012018-03-09 14:13:49 +0000770 {
telsoa01c577f2c2018-08-31 09:22:23 +0100771 return OverrideDataType(slot.GetConnectedOutputSlot()->GetTensorInfo(), dataType);
telsoa014fcda012018-03-09 14:13:49 +0000772 };
Finn Williams3e54d032020-10-22 16:53:35 +0100773
774 auto beginI = MakeTransformIterator(layer.GetInputSlots().begin(), getTensorInfo);
775 auto endI = MakeTransformIterator(layer.GetInputSlots().end(), getTensorInfo);
telsoa01c577f2c2018-08-31 09:22:23 +0100776 std::vector<TensorInfo> inputs(beginI, endI);
telsoa014fcda012018-03-09 14:13:49 +0000777
telsoa01c577f2c2018-08-31 09:22:23 +0100778 auto getTensorInfoPtr = [](const TensorInfo& info)
779 {
780 return &info;
781 };
Finn Williams3e54d032020-10-22 16:53:35 +0100782
783 auto beginPtr = MakeTransformIterator(inputs.begin(), getTensorInfoPtr);
784 auto endPtr = MakeTransformIterator(inputs.end(), getTensorInfoPtr);
telsoa01c577f2c2018-08-31 09:22:23 +0100785 std::vector<const TensorInfo*> inputPtrs(beginPtr, endPtr);
telsoa014fcda012018-03-09 14:13:49 +0000786
Nikhil Raj8599a412018-11-19 14:51:07 +0000787 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
788
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000789 result = layerSupportObject.IsConcatSupported(inputPtrs, output, cLayer->GetParameters(), reason);
Jim Flynne242f2d2019-05-22 14:24:13 +0100790
791
telsoa014fcda012018-03-09 14:13:49 +0000792 break;
793 }
794 case LayerType::Multiplication:
795 {
796 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
797 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100798 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000799 result = layerSupportObject.IsMultiplicationSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100800 OverrideDataType(input0, dataType),
801 OverrideDataType(input1, dataType),
802 OverrideDataType(output, dataType),
David Beck33f0ae02018-10-18 15:13:56 +0100803 reason);
telsoa014fcda012018-03-09 14:13:49 +0000804 break;
805 }
806 case LayerType::Normalization:
807 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100808 auto cLayer = PolymorphicDowncast<const NormalizationLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000809 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
810 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000811 result = layerSupportObject.IsNormalizationSupported(OverrideDataType(input, dataType),
812 OverrideDataType(output, dataType),
813 cLayer->GetParameters(),
814 reason);
telsoa014fcda012018-03-09 14:13:49 +0000815 break;
816 }
817 case LayerType::Output:
818 {
819 const TensorInfo& output = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000820 result = layerSupportObject.IsOutputSupported(OverrideDataType(output, dataType), reason);
telsoa014fcda012018-03-09 14:13:49 +0000821 break;
822 }
823 case LayerType::Permute:
824 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100825 auto cLayer = PolymorphicDowncast<const PermuteLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000826 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
827 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000828 result = layerSupportObject.IsPermuteSupported(OverrideDataType(input, dataType),
829 OverrideDataType(output, dataType),
830 cLayer->GetParameters(),
831 reason);
telsoa014fcda012018-03-09 14:13:49 +0000832 break;
833 }
Mohamed Nour Abouelseoud5662c202018-09-24 13:30:09 +0100834 case LayerType::Pad:
835 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100836 auto cLayer = PolymorphicDowncast<const PadLayer*>(&layer);
Mohamed Nour Abouelseoud5662c202018-09-24 13:30:09 +0100837 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
838 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000839 result = layerSupportObject.IsPadSupported(
Mohamed Nour Abouelseoud5662c202018-09-24 13:30:09 +0100840 OverrideDataType(input, dataType),
841 OverrideDataType(output, dataType),
842 cLayer->GetParameters(),
David Beck33f0ae02018-10-18 15:13:56 +0100843 reason);
Mohamed Nour Abouelseoud5662c202018-09-24 13:30:09 +0100844 break;
845 }
telsoa014fcda012018-03-09 14:13:49 +0000846 case LayerType::Pooling2d:
847 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100848 auto cLayer = PolymorphicDowncast<const Pooling2dLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000849 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
850 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000851 result = layerSupportObject.IsPooling2dSupported(OverrideDataType(input, dataType),
852 OverrideDataType(output, dataType),
853 cLayer->GetParameters(),
854 reason);
telsoa014fcda012018-03-09 14:13:49 +0000855 break;
856 }
Tamás Nyíri7b885b32021-10-26 14:47:57 +0100857 case LayerType::Pooling3d:
858 {
859 auto cLayer = PolymorphicDowncast<const Pooling3dLayer*>(&layer);
860 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
861 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
862 result = layerSupportObject.IsPooling3dSupported(OverrideDataType(input, dataType),
863 OverrideDataType(output, dataType),
864 cLayer->GetParameters(),
865 reason);
866 break;
867 }
Matteo Martincigh49124022019-01-11 13:25:59 +0000868 case LayerType::PreCompiled:
869 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100870 auto cLayer = PolymorphicDowncast<const PreCompiledLayer*>(&layer);
Matteo Martincigh49124022019-01-11 13:25:59 +0000871 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000872 result = layerSupportObject.IsPreCompiledSupported(OverrideDataType(input, dataType),
873 cLayer->GetParameters(),
874 reason);
Matteo Martincigh49124022019-01-11 13:25:59 +0000875 break;
876 }
Derek Lambertia9cca6a2019-03-25 15:41:58 +0000877 case LayerType::Quantize:
878 {
879 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
880 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000881 result = layerSupportObject.IsQuantizeSupported(input, output, reason);
Derek Lambertia9cca6a2019-03-25 15:41:58 +0000882 break;
883 }
James Conroy586a9aa2020-03-20 08:49:33 +0000884 case LayerType::QLstm:
885 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100886 auto cLayer = PolymorphicDowncast<const QLstmLayer*>(&layer);
James Conroy586a9aa2020-03-20 08:49:33 +0000887 const QLstmDescriptor& descriptor = cLayer->GetParameters();
888
889 // Inputs
890 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
891 const TensorInfo& previousOutputIn = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
892 const TensorInfo& previousCellStateIn = layer.GetInputSlot(2).GetConnection()->GetTensorInfo();
893
894 // Outputs
895 const TensorInfo& outputStateOut = layer.GetOutputSlot(0).GetTensorInfo();
896 const TensorInfo& cellStateOut = layer.GetOutputSlot(1).GetTensorInfo();
897 const TensorInfo& output = layer.GetOutputSlot(2).GetTensorInfo();
898
899 // Lstm parameters
900 LstmInputParamsInfo paramsInfo;
901
902 // Basic parameters
Matthew Bentham6f24b1a2021-06-29 15:18:32 +0100903 ARMNN_ASSERT(cLayer->m_BasicParameters.m_InputToForgetWeights.get() != nullptr);
904 ARMNN_ASSERT(cLayer->m_BasicParameters.m_InputToCellWeights.get() != nullptr);
905 ARMNN_ASSERT(cLayer->m_BasicParameters.m_InputToOutputWeights.get() != nullptr);
James Conroy586a9aa2020-03-20 08:49:33 +0000906 paramsInfo.m_InputToForgetWeights = &cLayer->m_BasicParameters.m_InputToForgetWeights->GetTensorInfo();
907 paramsInfo.m_InputToCellWeights = &cLayer->m_BasicParameters.m_InputToCellWeights->GetTensorInfo();
908 paramsInfo.m_InputToOutputWeights = &cLayer->m_BasicParameters.m_InputToOutputWeights->GetTensorInfo();
909
910 paramsInfo.m_RecurrentToForgetWeights =
911 &cLayer->m_BasicParameters.m_RecurrentToForgetWeights->GetTensorInfo();
912 paramsInfo.m_RecurrentToCellWeights =
913 &cLayer->m_BasicParameters.m_RecurrentToCellWeights->GetTensorInfo();
914 paramsInfo.m_RecurrentToOutputWeights =
915 &cLayer->m_BasicParameters.m_RecurrentToOutputWeights->GetTensorInfo();
916
917 paramsInfo.m_ForgetGateBias = &cLayer->m_BasicParameters.m_ForgetGateBias->GetTensorInfo();
918 paramsInfo.m_CellBias = &cLayer->m_BasicParameters.m_CellBias->GetTensorInfo();
919 paramsInfo.m_OutputGateBias = &cLayer->m_BasicParameters.m_OutputGateBias->GetTensorInfo();
920
921 if(!descriptor.m_CifgEnabled)
922 {
923 paramsInfo.m_InputToInputWeights = &cLayer->m_CifgParameters.m_InputToInputWeights->GetTensorInfo();
924 paramsInfo.m_RecurrentToInputWeights =
925 &cLayer->m_CifgParameters.m_RecurrentToInputWeights->GetTensorInfo();
926 paramsInfo.m_InputGateBias = &cLayer->m_CifgParameters.m_InputGateBias->GetTensorInfo();
927 }
928
929 if(descriptor.m_ProjectionEnabled)
930 {
931 paramsInfo.m_ProjectionWeights = &cLayer->m_ProjectionParameters.m_ProjectionWeights->GetTensorInfo();
James Conroyed324052020-05-18 15:16:42 +0100932
933 // Projection bias is optional even if projection is enabled
934 if (cLayer->m_ProjectionParameters.m_ProjectionBias != nullptr)
935 {
936 paramsInfo.m_ProjectionBias = &cLayer->m_ProjectionParameters.m_ProjectionBias->GetTensorInfo();
937 }
James Conroy586a9aa2020-03-20 08:49:33 +0000938 }
939
940 if(descriptor.m_PeepholeEnabled)
941 {
942 if (!descriptor.m_CifgEnabled)
943 {
944 paramsInfo.m_CellToInputWeights =
945 &cLayer->m_PeepholeParameters.m_CellToInputWeights->GetTensorInfo();
946 }
947
948 paramsInfo.m_CellToForgetWeights =
949 &cLayer->m_PeepholeParameters.m_CellToForgetWeights->GetTensorInfo();
950 paramsInfo.m_CellToOutputWeights = &cLayer->m_PeepholeParameters.m_CellToOutputWeights->GetTensorInfo();
951 }
952
953 if(descriptor.m_LayerNormEnabled)
954 {
955 if (!descriptor.m_CifgEnabled)
956 {
957 paramsInfo.m_InputLayerNormWeights =
958 &cLayer->m_LayerNormParameters.m_InputLayerNormWeights->GetTensorInfo();
959 }
960
961 paramsInfo.m_ForgetLayerNormWeights =
962 &cLayer->m_LayerNormParameters.m_ForgetLayerNormWeights->GetTensorInfo();
963 paramsInfo.m_CellLayerNormWeights =
964 &cLayer->m_LayerNormParameters.m_CellLayerNormWeights->GetTensorInfo();
965 paramsInfo.m_OutputLayerNormWeights =
966 &cLayer->m_LayerNormParameters.m_OutputLayerNormWeights->GetTensorInfo();
967 }
968
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000969 result = layerSupportObject.IsQLstmSupported(input,
970 previousOutputIn,
971 previousCellStateIn,
972 outputStateOut,
973 cellStateOut,
974 output,
975 descriptor,
976 paramsInfo,
977 reason);
James Conroy586a9aa2020-03-20 08:49:33 +0000978 break;
979 }
James Conroyee18dc82019-07-17 11:27:46 +0100980 case LayerType::QuantizedLstm:
981 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100982 auto cLayer = PolymorphicDowncast<const QuantizedLstmLayer*>(&layer);
James Conroyee18dc82019-07-17 11:27:46 +0100983
984 // Inputs
Ferran Balaguer737d9ff2019-08-01 09:58:08 +0100985 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
986 const TensorInfo& previousCellStateIn = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
987 const TensorInfo& previousOutputIn = layer.GetInputSlot(2).GetConnection()->GetTensorInfo();
James Conroyee18dc82019-07-17 11:27:46 +0100988
989 // Outputs
Ferran Balaguer737d9ff2019-08-01 09:58:08 +0100990 const TensorInfo& cellStateOut = layer.GetOutputSlot(0).GetTensorInfo();
991 const TensorInfo& output = layer.GetOutputSlot(1).GetTensorInfo();
James Conroyee18dc82019-07-17 11:27:46 +0100992
993 // QuantizedLstm parameters
James Conroyee18dc82019-07-17 11:27:46 +0100994 QuantizedLstmInputParamsInfo paramsInfo;
995
Ferran Balaguer737d9ff2019-08-01 09:58:08 +0100996 paramsInfo.m_InputToInputWeights =
997 &cLayer->m_QuantizedLstmParameters.m_InputToInputWeights->GetTensorInfo();
998 paramsInfo.m_InputToForgetWeights =
999 &cLayer->m_QuantizedLstmParameters.m_InputToForgetWeights->GetTensorInfo();
1000 paramsInfo.m_InputToCellWeights =
1001 &cLayer->m_QuantizedLstmParameters.m_InputToCellWeights->GetTensorInfo();
1002 paramsInfo.m_InputToOutputWeights =
1003 &cLayer->m_QuantizedLstmParameters.m_InputToOutputWeights->GetTensorInfo();
James Conroyee18dc82019-07-17 11:27:46 +01001004
Ferran Balaguer737d9ff2019-08-01 09:58:08 +01001005 paramsInfo.m_RecurrentToInputWeights =
1006 &cLayer->m_QuantizedLstmParameters.m_RecurrentToInputWeights->GetTensorInfo();
1007 paramsInfo.m_RecurrentToForgetWeights =
1008 &cLayer->m_QuantizedLstmParameters.m_RecurrentToForgetWeights->GetTensorInfo();
1009 paramsInfo.m_RecurrentToCellWeights =
1010 &cLayer->m_QuantizedLstmParameters.m_RecurrentToCellWeights->GetTensorInfo();
1011 paramsInfo.m_RecurrentToOutputWeights =
1012 &cLayer->m_QuantizedLstmParameters.m_RecurrentToOutputWeights->GetTensorInfo();
James Conroyee18dc82019-07-17 11:27:46 +01001013
Ferran Balaguer737d9ff2019-08-01 09:58:08 +01001014 paramsInfo.m_InputGateBias =
1015 &cLayer->m_QuantizedLstmParameters.m_InputGateBias->GetTensorInfo();
1016 paramsInfo.m_ForgetGateBias =
1017 &cLayer->m_QuantizedLstmParameters.m_ForgetGateBias->GetTensorInfo();
1018 paramsInfo.m_CellBias =
1019 &cLayer->m_QuantizedLstmParameters.m_CellBias->GetTensorInfo();
1020 paramsInfo.m_OutputGateBias =
1021 &cLayer->m_QuantizedLstmParameters.m_OutputGateBias->GetTensorInfo();;
James Conroyee18dc82019-07-17 11:27:46 +01001022
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001023 result = layerSupportObject.IsQuantizedLstmSupported(input,
1024 previousCellStateIn,
1025 previousOutputIn,
1026 cellStateOut,
1027 output,
1028 paramsInfo,
1029 reason);
James Conroyee18dc82019-07-17 11:27:46 +01001030 break;
1031 }
Francis Murtaghe7a86a42018-08-29 12:42:10 +01001032 case LayerType::Division:
1033 {
1034 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1035 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
1036 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001037 result = layerSupportObject.IsDivisionSupported(
Francis Murtaghe7a86a42018-08-29 12:42:10 +01001038 OverrideDataType(input0, dataType),
1039 OverrideDataType(input1, dataType),
1040 OverrideDataType(output, dataType),
David Beck33f0ae02018-10-18 15:13:56 +01001041 reason);
Francis Murtaghe7a86a42018-08-29 12:42:10 +01001042 break;
1043 }
Finn Williams2605b232020-06-10 15:53:46 +01001044 case LayerType::Rank:
1045 {
1046 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1047 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001048 result = layerSupportObject.IsRankSupported(OverrideDataType(input, dataType),
1049 OverrideDataType(output, dataType),
1050 reason);
Finn Williams2605b232020-06-10 15:53:46 +01001051 break;
1052 }
telsoa014fcda012018-03-09 14:13:49 +00001053 case LayerType::Reshape:
1054 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001055 auto cLayer = PolymorphicDowncast<const ReshapeLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +00001056 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
Kevin Maya023c402019-12-12 17:28:05 +00001057 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001058 result = layerSupportObject.IsReshapeSupported(OverrideDataType(input, dataType),
1059 OverrideDataType(output, dataType),
1060 cLayer->GetParameters(),
1061 reason);
telsoa014fcda012018-03-09 14:13:49 +00001062 break;
1063 }
Teresa Charlina9075df2019-06-27 15:41:57 +01001064 case LayerType::Resize:
1065 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001066 auto cLayer = PolymorphicDowncast<const ResizeLayer*>(&layer);
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01001067 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
Teresa Charlina9075df2019-06-27 15:41:57 +01001068 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001069 result = layerSupportObject.IsResizeSupported(OverrideDataType(input, dataType),
1070 OverrideDataType(output, dataType),
1071 cLayer->GetParameters(),
1072 reason);
Teresa Charlina9075df2019-06-27 15:41:57 +01001073 break;
1074 }
Keith Davis3ae3f972021-05-21 16:33:48 +01001075 case LayerType::Shape:
1076 {
1077 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1078 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
1079
1080 result = layerSupportObject.IsShapeSupported(OverrideDataType(input, dataType),
1081 OverrideDataType(output, dataType),
1082 reason);
1083 break;
1084 }
Aron Virginas-Tar636ab402019-09-16 14:27:45 +01001085 case LayerType::Slice:
1086 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001087 auto cLayer = PolymorphicDowncast<const SliceLayer*>(&layer);
Aron Virginas-Tar636ab402019-09-16 14:27:45 +01001088
1089 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1090 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
1091
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001092 result = layerSupportObject.IsSliceSupported(OverrideDataType(input, dataType),
1093 OverrideDataType(output, dataType),
1094 cLayer->GetParameters(),
1095 reason);
Aron Virginas-Tar636ab402019-09-16 14:27:45 +01001096 break;
1097 }
telsoa014fcda012018-03-09 14:13:49 +00001098 case LayerType::Softmax:
1099 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001100 auto cLayer = PolymorphicDowncast<const SoftmaxLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +00001101 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +01001102 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001103 result = layerSupportObject.IsSoftmaxSupported(OverrideDataType(input, dataType),
1104 OverrideDataType(output, dataType),
1105 cLayer->GetParameters(),
1106 reason);
telsoa014fcda012018-03-09 14:13:49 +00001107 break;
1108 }
Nattapat Chaimanowong207ef9a2018-11-02 10:57:25 +00001109 case LayerType::SpaceToBatchNd:
1110 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001111 auto cLayer = PolymorphicDowncast<const SpaceToBatchNdLayer*>(&layer);
Nattapat Chaimanowong207ef9a2018-11-02 10:57:25 +00001112 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1113 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001114 result = layerSupportObject.IsSpaceToBatchNdSupported(OverrideDataType(input, dataType),
1115 OverrideDataType(output, dataType),
1116 cLayer->GetParameters(),
1117 reason);
Nattapat Chaimanowong207ef9a2018-11-02 10:57:25 +00001118 break;
1119 }
Aron Virginas-Tar972af152019-06-11 14:14:03 +01001120 case LayerType::SpaceToDepth:
1121 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001122 auto cLayer = PolymorphicDowncast<const SpaceToDepthLayer*>(&layer);
Aron Virginas-Tar972af152019-06-11 14:14:03 +01001123
1124 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1125 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
1126
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001127 result = layerSupportObject.IsSpaceToDepthSupported(OverrideDataType(input, dataType),
1128 OverrideDataType(output, dataType),
1129 cLayer->GetParameters(),
1130 reason);
Aron Virginas-Tar972af152019-06-11 14:14:03 +01001131 break;
1132 }
telsoa014fcda012018-03-09 14:13:49 +00001133 case LayerType::Splitter:
1134 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001135 auto cLayer = PolymorphicDowncast<const SplitterLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +00001136 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
Narumol Prangnawarat15eb5832019-05-20 15:31:05 +01001137
1138 // Get vector of all outputs.
1139 auto getTensorInfo = [&dataType](const OutputSlot& slot)
1140 {
1141 return OverrideDataType(slot.GetTensorInfo(), dataType);
1142 };
Finn Williams3e54d032020-10-22 16:53:35 +01001143 auto beginI = MakeTransformIterator(layer.GetOutputSlots().begin(), getTensorInfo);
1144 auto endI = MakeTransformIterator(layer.GetOutputSlots().end(), getTensorInfo);
Narumol Prangnawarat15eb5832019-05-20 15:31:05 +01001145 std::vector<TensorInfo> outputs(beginI, endI);
1146
1147 const std::vector<std::reference_wrapper<TensorInfo>> outputPtrs(outputs.begin(), outputs.end());
1148
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001149 result = layerSupportObject.IsSplitterSupported(OverrideDataType(input, dataType),
1150 outputPtrs,
1151 cLayer->GetParameters(),
1152 reason);
telsoa014fcda012018-03-09 14:13:49 +00001153 break;
1154 }
Matthew Jackson2b8c1da2019-07-04 14:59:16 +01001155 case LayerType::Stack:
1156 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001157 auto cLayer = PolymorphicDowncast<const StackLayer*>(&layer);
Matthew Jackson2b8c1da2019-07-04 14:59:16 +01001158
1159 // Get vector of all inputs.
1160 auto getTensorInfo = [&dataType](const InputSlot& slot)
1161 {
1162 return OverrideDataType(slot.GetConnectedOutputSlot()->GetTensorInfo(), dataType);
1163 };
Finn Williams3e54d032020-10-22 16:53:35 +01001164 auto beginI = MakeTransformIterator(layer.GetInputSlots().begin(), getTensorInfo);
1165 auto endI = MakeTransformIterator(layer.GetInputSlots().end(), getTensorInfo);
Matthew Jackson2b8c1da2019-07-04 14:59:16 +01001166 std::vector<TensorInfo> inputs(beginI, endI);
1167
1168 auto getTensorInfoPtr = [](const TensorInfo& info)
1169 {
1170 return &info;
1171 };
Finn Williams3e54d032020-10-22 16:53:35 +01001172 auto beginPtr = MakeTransformIterator(inputs.begin(), getTensorInfoPtr);
1173 auto endPtr = MakeTransformIterator(inputs.end(), getTensorInfoPtr);
Matthew Jackson2b8c1da2019-07-04 14:59:16 +01001174 std::vector<const TensorInfo*> inputPtrs(beginPtr, endPtr);
1175
1176 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
1177
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001178 result = layerSupportObject.IsStackSupported(inputPtrs, output, cLayer->GetParameters(), reason);
Matthew Jackson2b8c1da2019-07-04 14:59:16 +01001179
1180 break;
1181 }
Derek Lamberti013c3902019-10-21 10:46:16 +01001182 case LayerType::StandIn:
1183 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001184 auto cLayer = PolymorphicDowncast<const StandInLayer*>(&layer);
Derek Lamberti013c3902019-10-21 10:46:16 +01001185
1186 // Get vector of all inputs.
1187 auto getTensorInfoIn = [&dataType](const InputSlot& slot)
1188 {
1189 return OverrideDataType(slot.GetConnectedOutputSlot()->GetTensorInfo(), dataType);
1190 };
1191 auto getTensorInfoOut = [&dataType](const OutputSlot& slot)
1192 {
1193 return OverrideDataType(slot.GetTensorInfo(), dataType);
1194 };
Finn Williams3e54d032020-10-22 16:53:35 +01001195 auto beginI = MakeTransformIterator(layer.GetInputSlots().begin(), getTensorInfoIn);
1196 auto endI = MakeTransformIterator(layer.GetInputSlots().end(), getTensorInfoIn);
Derek Lamberti013c3902019-10-21 10:46:16 +01001197 std::vector<TensorInfo> inputs(beginI, endI);
1198
Finn Williams3e54d032020-10-22 16:53:35 +01001199 auto beginO = MakeTransformIterator(layer.GetOutputSlots().begin(), getTensorInfoOut);
1200 auto endO = MakeTransformIterator(layer.GetOutputSlots().end(), getTensorInfoOut);
Derek Lamberti013c3902019-10-21 10:46:16 +01001201 std::vector<TensorInfo> outputs(beginO, endO);
1202
1203
1204 auto getTensorInfoPtr = [](const TensorInfo& info)
1205 {
1206 return &info;
1207 };
Finn Williams3e54d032020-10-22 16:53:35 +01001208 auto beginPtrI = MakeTransformIterator(inputs.begin(), getTensorInfoPtr);
1209 auto endPtrI = MakeTransformIterator(inputs.end(), getTensorInfoPtr);
Derek Lamberti013c3902019-10-21 10:46:16 +01001210 std::vector<const TensorInfo*> inputPtrs(beginPtrI, endPtrI);
1211
Finn Williams3e54d032020-10-22 16:53:35 +01001212 auto beginPtrO = MakeTransformIterator(outputs.begin(), getTensorInfoPtr);
1213 auto endPtrO = MakeTransformIterator(outputs.end(), getTensorInfoPtr);
Derek Lamberti013c3902019-10-21 10:46:16 +01001214 std::vector<const TensorInfo*> outputPtrs(beginPtrO, endPtrO);
1215
1216
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001217 result = layerSupportObject.IsStandInSupported(inputPtrs,
1218 outputPtrs,
1219 cLayer->GetParameters(),
1220 reason);
Derek Lamberti013c3902019-10-21 10:46:16 +01001221 break;
1222 }
Conor Kennedy430b5d82018-11-14 15:28:28 +00001223 case LayerType::StridedSlice:
1224 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001225 auto cLayer = PolymorphicDowncast<const StridedSliceLayer*>(&layer);
Conor Kennedy430b5d82018-11-14 15:28:28 +00001226 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1227 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001228 result = layerSupportObject.IsStridedSliceSupported(OverrideDataType(input, dataType),
1229 OverrideDataType(output, dataType),
1230 cLayer->GetParameters(),
1231 reason);
Conor Kennedy430b5d82018-11-14 15:28:28 +00001232 break;
1233 }
David Beckc2044fe2018-09-05 15:00:38 +01001234 case LayerType::Subtraction:
1235 {
1236 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1237 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
1238 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001239 result = layerSupportObject.IsSubtractionSupported(
David Beckc2044fe2018-09-05 15:00:38 +01001240 OverrideDataType(input0, dataType),
1241 OverrideDataType(input1, dataType),
1242 OverrideDataType(output, dataType),
David Beck33f0ae02018-10-18 15:13:56 +01001243 reason);
David Beckc2044fe2018-09-05 15:00:38 +01001244 break;
1245 }
Sadik Armaganeff363d2019-04-05 15:25:46 +01001246 case LayerType::Switch:
1247 {
1248 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1249 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
1250 const TensorInfo& output0 = layer.GetOutputSlot(0).GetTensorInfo();
1251 const TensorInfo& output1 = layer.GetOutputSlot(1).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001252 result = layerSupportObject.IsSwitchSupported(OverrideDataType(input0, dataType),
1253 OverrideDataType(input1, dataType),
1254 OverrideDataType(output0, dataType),
1255 OverrideDataType(output1, dataType),
1256 reason);
Sadik Armaganeff363d2019-04-05 15:25:46 +01001257 break;
1258 }
narpra0132b90462018-09-13 11:07:48 +01001259 case LayerType::Mean:
1260 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001261 auto cLayer = PolymorphicDowncast<const MeanLayer*>(&layer);
narpra0132b90462018-09-13 11:07:48 +01001262 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1263 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001264 result = layerSupportObject.IsMeanSupported(
narpra0132b90462018-09-13 11:07:48 +01001265 OverrideDataType(input, dataType),
1266 OverrideDataType(output, dataType),
1267 cLayer->GetParameters(),
David Beck33f0ae02018-10-18 15:13:56 +01001268 reason);
narpra0132b90462018-09-13 11:07:48 +01001269 break;
1270 }
kevmay0190539692018-11-29 08:40:19 +00001271 case LayerType::Minimum:
1272 {
1273 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1274 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
1275 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001276 result = layerSupportObject.IsMinimumSupported(OverrideDataType(input0, dataType),
1277 OverrideDataType(input1, dataType),
1278 OverrideDataType(output, dataType),
1279 reason);
kevmay0190539692018-11-29 08:40:19 +00001280 break;
1281 }
Matteo Martincigh0e406ee2019-06-12 15:42:18 +01001282 case LayerType::Prelu:
1283 {
1284 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1285 const TensorInfo& alpha = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
1286 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001287 result = layerSupportObject.IsPreluSupported(OverrideDataType(input, dataType),
1288 OverrideDataType(alpha, dataType),
1289 OverrideDataType(output, dataType),
1290 reason);
Matteo Martincigh0e406ee2019-06-12 15:42:18 +01001291 break;
1292 }
Mike Kellyc9ea45a2020-02-28 18:11:58 +00001293 case LayerType::Transpose:
1294 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001295 auto cLayer = PolymorphicDowncast<const TransposeLayer*>(&layer);
Mike Kellyc9ea45a2020-02-28 18:11:58 +00001296 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1297 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001298 result = layerSupportObject.IsTransposeSupported(OverrideDataType(input, dataType),
1299 OverrideDataType(output, dataType),
1300 cLayer->GetParameters(),
1301 reason);
Mike Kellyc9ea45a2020-02-28 18:11:58 +00001302 break;
1303 }
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01001304 case LayerType::TransposeConvolution2d:
1305 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001306 auto cLayer = PolymorphicDowncast<const TransposeConvolution2dLayer*>(&layer);
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01001307
1308 const TensorInfo input = OverrideDataType(layer.GetInputSlot(0).GetConnection()->GetTensorInfo(),
1309 dataType);
1310 const TensorInfo output = OverrideDataType(layer.GetOutputSlot(0).GetTensorInfo(), dataType);
1311
1312 const TransposeConvolution2dDescriptor& descriptor = cLayer->GetParameters();
1313
1314 Optional<TensorInfo> biases;
1315 if (descriptor.m_BiasEnabled)
1316 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001317 ARMNN_ASSERT(cLayer->m_Bias.get() != nullptr);
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01001318 biases = OverrideDataType(cLayer->m_Bias->GetTensorInfo(),
1319 GetBiasTypeFromWeightsType(dataType));
1320 }
1321
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001322 ARMNN_ASSERT(cLayer->m_Weight.get() != nullptr);
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01001323 const TensorInfo weights = OverrideDataType(cLayer->m_Weight->GetTensorInfo(), dataType);
1324
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001325 result = layerSupportObject.IsTransposeConvolution2dSupported(input,
1326 output,
1327 descriptor,
1328 weights,
1329 biases,
1330 reason);
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01001331
1332 break;
1333 }
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +00001334 case LayerType::Reduce:
1335 {
1336 auto cLayer = PolymorphicDowncast<const ReduceLayer*>(&layer);
1337 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1338 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
1339
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001340 result = layerSupportObject.IsReduceSupported(OverrideDataType(input, dataType),
1341 OverrideDataType(output, dataType),
1342 cLayer->GetParameters(),
1343 reason);
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +00001344 break;
1345 }
Narumol Prangnawarat8ed39ae2021-07-15 16:16:25 +01001346 case LayerType::UnidirectionalSequenceLstm:
1347 {
1348 auto cLayer = PolymorphicDowncast<const UnidirectionalSequenceLstmLayer*>(&layer);
1349 const UnidirectionalSequenceLstmDescriptor& descriptor = cLayer->GetParameters();
1350
1351 // All inputs.
1352 const TensorInfo& input = OverrideDataType(layer.GetInputSlot(0).GetConnection()->GetTensorInfo(),
1353 dataType);
1354 const TensorInfo& outputStateIn = OverrideDataType(layer.GetInputSlot(1).GetConnection()->GetTensorInfo(),
1355 dataType);
1356 const TensorInfo& cellStateIn = OverrideDataType(layer.GetInputSlot(2).GetConnection()->GetTensorInfo(),
1357 dataType);
1358 // Outputs
1359 const TensorInfo& output = OverrideDataType(layer.GetOutputSlot(0).GetTensorInfo(), dataType);
1360
1361 // Basic parameters
1362 const TensorInfo& inputToForgetWeights
1363 = OverrideDataType(cLayer->m_BasicParameters.m_InputToForgetWeights->GetTensorInfo(), dataType);
1364 const TensorInfo& inputToCellWeights
1365 = OverrideDataType(cLayer->m_BasicParameters.m_InputToCellWeights->GetTensorInfo(), dataType);
1366 const TensorInfo& inputToOutputWeights
1367 = OverrideDataType(cLayer->m_BasicParameters.m_InputToOutputWeights->GetTensorInfo(), dataType);
1368 const TensorInfo& recurrentToForgetWeights
1369 = OverrideDataType(cLayer->m_BasicParameters.m_RecurrentToForgetWeights->GetTensorInfo(), dataType);
1370 const TensorInfo& recurrentToCellWeights
1371 = OverrideDataType(cLayer->m_BasicParameters.m_RecurrentToCellWeights->GetTensorInfo(), dataType);
1372 const TensorInfo& recurrentToOutputWeights
1373 = OverrideDataType(cLayer->m_BasicParameters.m_RecurrentToOutputWeights->GetTensorInfo(), dataType);
1374 const TensorInfo& forgetGateBias
1375 = OverrideDataType(cLayer->m_BasicParameters.m_ForgetGateBias->GetTensorInfo(), dataType);
1376 const TensorInfo& cellBias
1377 = OverrideDataType(cLayer->m_BasicParameters.m_CellBias->GetTensorInfo(), dataType);
1378 const TensorInfo& outputGateBias
1379 = OverrideDataType(cLayer->m_BasicParameters.m_OutputGateBias->GetTensorInfo(), dataType);
1380
1381 LstmInputParamsInfo paramsInfo;
1382
1383 paramsInfo.m_InputToForgetWeights = &inputToForgetWeights;
1384 paramsInfo.m_InputToCellWeights = &inputToCellWeights;
1385 paramsInfo.m_InputToOutputWeights = &inputToOutputWeights;
1386 paramsInfo.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
1387 paramsInfo.m_RecurrentToCellWeights = &recurrentToCellWeights;
1388 paramsInfo.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
1389 paramsInfo.m_ForgetGateBias = &forgetGateBias;
1390 paramsInfo.m_CellBias = &cellBias;
1391 paramsInfo.m_OutputGateBias = &outputGateBias;
1392
1393 // Optional parameters
1394 TensorInfo optInputToInputWeights;
1395 TensorInfo optRecurrentToInputWeights;
1396 TensorInfo optCellToInputWeights;
1397 TensorInfo optInputGateBias;
1398 TensorInfo optProjectionWeights;
1399 TensorInfo optProjectionBias;
1400 TensorInfo optCellToForgetWeights;
1401 TensorInfo optCellToOutputWeights;
1402 TensorInfo optInputLayerNormWeights;
1403 TensorInfo optForgetLayerNormWeights;
1404 TensorInfo optCellLayerNormWeights;
1405 TensorInfo optOutputLayerNormWeights;
1406
1407 if(!descriptor.m_CifgEnabled)
1408 {
1409 optInputToInputWeights =
1410 OverrideDataType(cLayer->m_CifgParameters.m_InputToInputWeights->GetTensorInfo(), dataType);
1411 paramsInfo.m_InputToInputWeights = &optInputToInputWeights;
1412
1413 optRecurrentToInputWeights =
1414 OverrideDataType(cLayer->m_CifgParameters.m_RecurrentToInputWeights->GetTensorInfo(), dataType);
1415 paramsInfo.m_RecurrentToInputWeights = &optRecurrentToInputWeights;
1416 optInputGateBias =
1417 OverrideDataType(cLayer->m_CifgParameters.m_InputGateBias->GetTensorInfo(), dataType);
1418 paramsInfo.m_InputGateBias = &optInputGateBias;
1419 }
1420
1421 if(descriptor.m_ProjectionEnabled)
1422 {
1423 optProjectionWeights =
1424 OverrideDataType(cLayer->m_ProjectionParameters.m_ProjectionWeights->GetTensorInfo(), dataType);
1425 paramsInfo.m_ProjectionWeights = &optProjectionWeights;
1426 if (cLayer->m_ProjectionParameters.m_ProjectionBias != nullptr)
1427 {
1428 optProjectionBias =
1429 OverrideDataType(cLayer->m_ProjectionParameters.m_ProjectionBias->GetTensorInfo(), dataType);
1430 paramsInfo.m_ProjectionBias = &optProjectionBias;
1431 }
1432 }
1433
1434 if(descriptor.m_PeepholeEnabled)
1435 {
1436 if(!descriptor.m_CifgEnabled)
1437 {
1438 optCellToInputWeights =
1439 OverrideDataType(cLayer->m_PeepholeParameters.m_CellToInputWeights->GetTensorInfo(),
1440 dataType);
1441 paramsInfo.m_CellToInputWeights = &optCellToInputWeights;
1442 }
1443 optCellToForgetWeights =
1444 OverrideDataType(cLayer->m_PeepholeParameters.m_CellToForgetWeights->GetTensorInfo(), dataType);
1445 paramsInfo.m_CellToForgetWeights = &optCellToForgetWeights;
1446 optCellToOutputWeights =
1447 OverrideDataType(cLayer->m_PeepholeParameters.m_CellToOutputWeights->GetTensorInfo(), dataType);
1448 paramsInfo.m_CellToOutputWeights = &optCellToOutputWeights;
1449 }
1450
1451 if(descriptor.m_LayerNormEnabled)
1452 {
1453 if (!descriptor.m_CifgEnabled)
1454 {
1455 optInputLayerNormWeights = OverrideDataType(
1456 cLayer->m_LayerNormParameters.m_InputLayerNormWeights->GetTensorInfo(), dataType);
1457 paramsInfo.m_InputLayerNormWeights = &optInputLayerNormWeights;
1458 }
1459
1460 optForgetLayerNormWeights = OverrideDataType(
1461 cLayer->m_LayerNormParameters.m_ForgetLayerNormWeights->GetTensorInfo(), dataType);
1462 paramsInfo.m_ForgetLayerNormWeights = &optForgetLayerNormWeights;
1463
1464 optCellLayerNormWeights = OverrideDataType(
1465 cLayer->m_LayerNormParameters.m_CellLayerNormWeights->GetTensorInfo(), dataType);
1466 paramsInfo.m_CellLayerNormWeights = &optCellLayerNormWeights;
1467
1468 optOutputLayerNormWeights = OverrideDataType(
1469 cLayer->m_LayerNormParameters.m_OutputLayerNormWeights->GetTensorInfo(), dataType);
1470 paramsInfo.m_OutputLayerNormWeights = &optOutputLayerNormWeights;
1471 }
1472
1473 Optional<TensorInfo> hiddenStateOut;
1474 Optional<TensorInfo> cellStateOut;
1475
1476 result = layerSupportObject.IsUnidirectionalSequenceLstmSupported(input,
1477 outputStateIn,
1478 cellStateIn,
1479 output,
1480 hiddenStateOut,
1481 cellStateOut,
1482 descriptor,
1483 paramsInfo,
1484 reason);
1485 break;
1486 }
telsoa014fcda012018-03-09 14:13:49 +00001487 default:
1488 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001489 ARMNN_ASSERT_MSG(false, "WorkloadFactory did not recognise type of layer.");
David Beck33f0ae02018-10-18 15:13:56 +01001490 reason.value() = "Unrecognised layer type";
telsoa014fcda012018-03-09 14:13:49 +00001491 result = false;
1492 break;
1493 }
1494 }
telsoa014fcda012018-03-09 14:13:49 +00001495 return result;
1496}
1497
Sadik Armagan045f6be2020-09-10 13:37:32 +01001498bool IWorkloadFactory::IsLayerSupported(const BackendId& backendId,
1499 const IConnectableLayer& connectableLayer,
1500 Optional<DataType> dataType,
1501 std::string& outReasonIfUnsupported)
1502{
1503 return IsLayerConfigurationSupported(backendId, connectableLayer, dataType, outReasonIfUnsupported);
1504}
1505
David Beckdcb751f2018-10-03 11:42:42 +01001506bool IWorkloadFactory::IsLayerSupported(const IConnectableLayer& connectableLayer,
David Beck29c75de2018-10-23 13:35:58 +01001507 Optional<DataType> dataType,
telsoa01c577f2c2018-08-31 09:22:23 +01001508 std::string& outReasonIfUnsupported)
telsoa014fcda012018-03-09 14:13:49 +00001509{
Jan Eilersbb446e52020-04-02 13:56:54 +01001510 auto layer = PolymorphicDowncast<const Layer*>(&connectableLayer);
Sadik Armagan045f6be2020-09-10 13:37:32 +01001511 return IsLayerConfigurationSupported(layer->GetBackendId(), connectableLayer, dataType, outReasonIfUnsupported);
1512}
1513
// TODO: merge with the overload above by giving the modelOptions parameter a default value.
1515bool IWorkloadFactory::IsLayerSupported(const IConnectableLayer& connectableLayer,
1516 Optional<DataType> dataType,
1517 std::string& outReasonIfUnsupported,
1518 const ModelOptions& modelOptions)
1519{
1520 auto layer = PolymorphicDowncast<const Layer*>(&connectableLayer);
1521 return IsLayerConfigurationSupported(layer->GetBackendId(),
1522 connectableLayer,
1523 dataType,
1524 outReasonIfUnsupported,
1525 modelOptions);
telsoa014fcda012018-03-09 14:13:49 +00001526}
1527
Sadik Armagan04a72972020-09-14 15:44:18 +01001528bool IWorkloadFactory::IsLayerSupported(const BackendId& backendId,
1529 const IConnectableLayer& connectableLayer,
1530 Optional<DataType> dataType,
1531 std::string& outReasonIfUnsupported,
1532 const ModelOptions& modelOptions)
1533{
1534 return IsLayerConfigurationSupported(backendId,
1535 connectableLayer,
1536 dataType,
1537 outReasonIfUnsupported,
1538 modelOptions);
1539}
Teresa Charlin611c7fb2022-01-07 09:47:29 +00001540ARMNN_NO_DEPRECATE_WARN_BEGIN
1541std::unique_ptr<IWorkload> IWorkloadFactory::CreateWorkload(LayerType type,
1542 const QueueDescriptor& descriptor,
1543 const WorkloadInfo& info) const
1544{
1545 switch(type)
1546 {
1547 case LayerType::Activation :
1548 {
1549 auto activationQueueDescriptor = PolymorphicDowncast<const ActivationQueueDescriptor*>(&descriptor);
1550 return CreateActivation(*activationQueueDescriptor, info);
1551 }
1552 case LayerType::Addition :
1553 {
1554 auto additionQueueDescriptor = PolymorphicDowncast<const AdditionQueueDescriptor*>(&descriptor);
1555 return CreateAddition(*additionQueueDescriptor, info);
1556 }
1557 case LayerType::ArgMinMax :
1558 {
1559 auto argMinMaxQueueDescriptor = PolymorphicDowncast<const ArgMinMaxQueueDescriptor*>(&descriptor);
1560 return CreateArgMinMax(*argMinMaxQueueDescriptor, info);
1561 }
1562 case LayerType::BatchNormalization :
1563 {
1564 auto batchNormQueueDescriptor = PolymorphicDowncast<const BatchNormalizationQueueDescriptor*>(&descriptor);
1565 return CreateBatchNormalization(*batchNormQueueDescriptor, info);
1566 }
1567 case LayerType::BatchToSpaceNd :
1568 {
1569 auto batchToSpaceNdQueueDescriptor
1570 = PolymorphicDowncast<const BatchToSpaceNdQueueDescriptor*>(&descriptor);
1571 return CreateBatchToSpaceNd(*batchToSpaceNdQueueDescriptor, info);
1572 }
1573 case LayerType::Cast :
1574 {
1575 auto castQueueDescriptor = PolymorphicDowncast<const CastQueueDescriptor*>(&descriptor);
1576 return CreateCast(*castQueueDescriptor, info);
1577 }
1578 case LayerType::ChannelShuffle :
1579 {
1580 auto channelShuffleQueueDescriptor
1581 = PolymorphicDowncast<const ChannelShuffleQueueDescriptor*>(&descriptor);
1582 return CreateChannelShuffle(*channelShuffleQueueDescriptor, info);
1583 }
1584 case LayerType::Comparison :
1585 {
1586 auto comparisonQueueDescriptor = PolymorphicDowncast<const ComparisonQueueDescriptor*>(&descriptor);
1587 return CreateComparison(*comparisonQueueDescriptor, info);
1588 }
1589 case LayerType::Concat :
1590 {
1591 auto concatQueueDescriptor = PolymorphicDowncast<const ConcatQueueDescriptor*>(&descriptor);
1592 return CreateConcat(*concatQueueDescriptor, info);
1593 }
1594 case LayerType::Constant :
1595 {
1596 auto constantQueueDescriptor = PolymorphicDowncast<const ConstantQueueDescriptor*>(&descriptor);
1597 return CreateConstant(*constantQueueDescriptor, info);
1598 }
1599 case LayerType::ConvertBf16ToFp32 :
1600 {
1601 auto convertBf16ToFp32QueueDescriptor
1602 = PolymorphicDowncast<const ConvertBf16ToFp32QueueDescriptor*>(&descriptor);
1603 return CreateConvertBf16ToFp32(*convertBf16ToFp32QueueDescriptor, info);
1604 }
1605 case LayerType::ConvertFp16ToFp32:
1606 {
1607 auto convertFp16ToFp32QueueDescriptor
1608 = PolymorphicDowncast<const ConvertFp16ToFp32QueueDescriptor*>(&descriptor);
1609 return CreateConvertFp16ToFp32(*convertFp16ToFp32QueueDescriptor, info);
1610 }
1611 case LayerType::ConvertFp32ToBf16:
1612 {
1613 auto convertFp32ToBf16QueueDescriptor
1614 = PolymorphicDowncast<const ConvertFp32ToBf16QueueDescriptor*>(&descriptor);
1615 return CreateConvertFp32ToBf16(*convertFp32ToBf16QueueDescriptor, info);
1616 }
1617 case LayerType::ConvertFp32ToFp16:
1618 {
1619 auto convertFp32ToFp16QueueDescriptor
1620 = PolymorphicDowncast<const ConvertFp32ToFp16QueueDescriptor*>(&descriptor);
1621 return CreateConvertFp32ToFp16(*convertFp32ToFp16QueueDescriptor, info);
1622 }
1623 case LayerType::Convolution2d:
1624 {
1625 auto convolution2dQueueDescriptor = PolymorphicDowncast<const Convolution2dQueueDescriptor*>(&descriptor);
1626 return CreateConvolution2d(*convolution2dQueueDescriptor, info);
1627 }
1628 case LayerType::Convolution3d:
1629 {
1630 auto convolution3dQueueDescriptor = PolymorphicDowncast<const Convolution3dQueueDescriptor*>(&descriptor);
1631 return CreateConvolution3d(*convolution3dQueueDescriptor, info);
1632 }
1633 case LayerType::Debug:
1634 {
1635 auto debugQueueDescriptor = PolymorphicDowncast<const DebugQueueDescriptor*>(&descriptor);
1636 return CreateDebug(*debugQueueDescriptor, info);
1637 }
1638 case LayerType::DepthToSpace:
1639 {
1640 auto depthToSpaceQueueDescriptor = PolymorphicDowncast<const DepthToSpaceQueueDescriptor*>(&descriptor);
1641 return CreateDepthToSpace(*depthToSpaceQueueDescriptor, info);
1642 }
1643 case LayerType::DepthwiseConvolution2d:
1644 {
1645 auto depthwiseConvolution2DQueueDescriptor
1646 = PolymorphicDowncast<const DepthwiseConvolution2dQueueDescriptor*>(&descriptor);
1647 return CreateDepthwiseConvolution2d(*depthwiseConvolution2DQueueDescriptor, info);
1648 }
1649 case LayerType::Dequantize:
1650 {
1651 auto dequantizeQueueDescriptor = PolymorphicDowncast<const DequantizeQueueDescriptor*>(&descriptor);
1652 return CreateDequantize(*dequantizeQueueDescriptor, info);
1653 }
1654 case LayerType::DetectionPostProcess:
1655 {
1656 auto detectionPostProcessQueueDescriptor
1657 = PolymorphicDowncast<const DetectionPostProcessQueueDescriptor*>(&descriptor);
1658 return CreateDetectionPostProcess(*detectionPostProcessQueueDescriptor, info);
1659 }
1660 case LayerType::Division:
1661 {
1662 auto divisionQueueDescriptor = PolymorphicDowncast<const DivisionQueueDescriptor*>(&descriptor);
1663 return CreateDivision(*divisionQueueDescriptor, info);
1664 }
1665 case LayerType::ElementwiseUnary:
1666 {
1667 auto elementwiseUnaryQueueDescriptor
1668 = PolymorphicDowncast<const ElementwiseUnaryQueueDescriptor*>(&descriptor);
1669 return CreateElementwiseUnary(*elementwiseUnaryQueueDescriptor, info);
1670
1671 }
1672 case LayerType::FakeQuantization:
1673 {
1674 auto fakeQuantizationQueueDescriptor
1675 = PolymorphicDowncast<const FakeQuantizationQueueDescriptor*>(&descriptor);
1676 return CreateFakeQuantization(*fakeQuantizationQueueDescriptor, info);
1677 }
1678 case LayerType::Fill:
1679 {
1680 auto fillQueueDescriptor = PolymorphicDowncast<const FillQueueDescriptor*>(&descriptor);
1681 return CreateFill(*fillQueueDescriptor, info);
1682 }
1683 case LayerType::Floor:
1684 {
1685 auto floorQueueDescriptor = PolymorphicDowncast<const FloorQueueDescriptor*>(&descriptor);
1686 return CreateFloor(*floorQueueDescriptor, info);
1687 }
1688 case LayerType::FullyConnected:
1689 {
1690 auto fullyConnectedQueueDescriptor
1691 = PolymorphicDowncast<const FullyConnectedQueueDescriptor*>(&descriptor);
1692 return CreateFullyConnected(*fullyConnectedQueueDescriptor, info);
1693 }
1694 case LayerType::Gather:
1695 {
1696 auto gatherQueueDescriptor = PolymorphicDowncast<const GatherQueueDescriptor*>(&descriptor);
1697 return CreateGather(*gatherQueueDescriptor, info);
1698 }
1699 case LayerType::Input:
1700 {
1701 auto inputQueueDescriptor = PolymorphicDowncast<const InputQueueDescriptor*>(&descriptor);
1702 return CreateInput(*inputQueueDescriptor, info);
1703 }
1704 case LayerType::InstanceNormalization:
1705 {
1706 auto instanceNormalizationQueueDescriptor
1707 = PolymorphicDowncast<const InstanceNormalizationQueueDescriptor*>(&descriptor);
1708 return CreateInstanceNormalization(*instanceNormalizationQueueDescriptor, info);
1709 }
1710 case LayerType::L2Normalization:
1711 {
1712 auto l2NormalizationQueueDescriptor
1713 = PolymorphicDowncast<const L2NormalizationQueueDescriptor*>(&descriptor);
1714 return CreateL2Normalization(*l2NormalizationQueueDescriptor, info);
1715 }
1716 case LayerType::LogicalBinary:
1717 {
1718 auto logicalBinaryQueueDescriptor = PolymorphicDowncast<const LogicalBinaryQueueDescriptor*>(&descriptor);
1719 return CreateLogicalBinary(*logicalBinaryQueueDescriptor, info);
1720 }
1721 case LayerType::LogSoftmax:
1722 {
1723 auto logSoftmaxQueueDescriptor = PolymorphicDowncast<const LogSoftmaxQueueDescriptor*>(&descriptor);
1724 return CreateLogSoftmax(*logSoftmaxQueueDescriptor, info);
1725 }
1726 case LayerType::Lstm:
1727 {
1728 auto lstmQueueDescriptor = PolymorphicDowncast<const LstmQueueDescriptor*>(&descriptor);
1729 return CreateLstm(*lstmQueueDescriptor, info);
1730 }
1731 case LayerType::Maximum:
1732 {
1733 auto maximumQueueDescriptor = PolymorphicDowncast<const MaximumQueueDescriptor*>(&descriptor);
1734 return CreateMaximum(*maximumQueueDescriptor, info);
1735 }
1736 case LayerType::Mean:
1737 {
1738 auto meanQueueDescriptor = PolymorphicDowncast<const MeanQueueDescriptor*>(&descriptor);
1739 return CreateMean(*meanQueueDescriptor, info);
1740 }
1741 case LayerType::MemCopy:
1742 {
1743 auto memCopyQueueDescriptor = PolymorphicDowncast<const MemCopyQueueDescriptor*>(&descriptor);
1744 return CreateMemCopy(*memCopyQueueDescriptor, info);
1745 }
1746 case LayerType::MemImport:
1747 {
1748 auto memImportQueueDescriptor = PolymorphicDowncast<const MemImportQueueDescriptor*>(&descriptor);
1749 return CreateMemImport(*memImportQueueDescriptor, info);
1750 }
1751 case LayerType::Minimum:
1752 {
1753 auto minimumQueueDescriptor = PolymorphicDowncast<const MinimumQueueDescriptor*>(&descriptor);
1754 return CreateMinimum(*minimumQueueDescriptor, info);
1755 }
1756 case LayerType::Multiplication:
1757 {
1758 auto multiplicationQueueDescriptor
1759 = PolymorphicDowncast<const MultiplicationQueueDescriptor*>(&descriptor);
1760 return CreateMultiplication(*multiplicationQueueDescriptor, info);
1761 }
1762 case LayerType::Normalization:
1763 {
1764 auto normalizationQueueDescriptor = PolymorphicDowncast<const NormalizationQueueDescriptor*>(&descriptor);
1765 return CreateNormalization(*normalizationQueueDescriptor, info);
1766 }
1767 case LayerType::Output:
1768 {
1769 auto outputQueueDescriptor = PolymorphicDowncast<const OutputQueueDescriptor*>(&descriptor);
1770 return CreateOutput(*outputQueueDescriptor, info);
1771 }
1772 case LayerType::Pad:
1773 {
1774 auto padQueueDescriptor = PolymorphicDowncast<const PadQueueDescriptor*>(&descriptor);
1775 return CreatePad(*padQueueDescriptor, info);
1776 }
1777 case LayerType::Permute:
1778 {
1779 auto permuteQueueDescriptor = PolymorphicDowncast<const PermuteQueueDescriptor*>(&descriptor);
1780 return CreatePermute(*permuteQueueDescriptor, info);
1781 }
1782 case LayerType::Pooling2d:
1783 {
1784 auto pooling2dQueueDescriptor = PolymorphicDowncast<const Pooling2dQueueDescriptor*>(&descriptor);
1785 return CreatePooling2d(*pooling2dQueueDescriptor, info);
1786 }
1787 case LayerType::Pooling3d:
1788 {
1789 auto pooling3dQueueDescriptor = PolymorphicDowncast<const Pooling3dQueueDescriptor*>(&descriptor);
1790 return CreatePooling3d(*pooling3dQueueDescriptor, info);
1791 }
1792 case LayerType::PreCompiled:
1793 {
1794 auto preCompiledQueueDescriptor = PolymorphicDowncast<const PreCompiledQueueDescriptor*>(&descriptor);
1795 return CreatePreCompiled(*preCompiledQueueDescriptor, info);
1796 }
1797 case LayerType::Prelu:
1798 {
1799 auto preluQueueDescriptor = PolymorphicDowncast<const PreluQueueDescriptor*>(&descriptor);
1800 return CreatePrelu(*preluQueueDescriptor, info);
1801 }
1802 case LayerType::QLstm:
1803 {
1804 auto qlstmQueueDescriptor = PolymorphicDowncast<const QLstmQueueDescriptor*>(&descriptor);
1805 return CreateQLstm(*qlstmQueueDescriptor, info);
1806 }
1807 case LayerType::Quantize:
1808 {
1809 auto quantizeQueueDescriptor = PolymorphicDowncast<const QuantizeQueueDescriptor*>(&descriptor);
1810 return CreateQuantize(*quantizeQueueDescriptor, info);
1811 }
1812 case LayerType::Rank:
1813 {
1814 auto rankQueueDescriptor = PolymorphicDowncast<const RankQueueDescriptor*>(&descriptor);
1815 return CreateRank(*rankQueueDescriptor, info);
1816 }
1817 case LayerType::Reduce:
1818 {
1819 auto reduceQueueDescriptor = PolymorphicDowncast<const ReduceQueueDescriptor*>(&descriptor);
1820 return CreateReduce(*reduceQueueDescriptor, info);
1821 }
1822 case LayerType::Reshape:
1823 {
1824 auto reshapeQueueDescriptor = PolymorphicDowncast<const ReshapeQueueDescriptor*>(&descriptor);
1825 return CreateReshape(*reshapeQueueDescriptor, info);
1826 }
1827 case LayerType::Resize:
1828 {
1829 auto resizeQueueDescriptor = PolymorphicDowncast<const ResizeQueueDescriptor*>(&descriptor);
1830 return CreateResize(*resizeQueueDescriptor, info);
1831 }
1832 case LayerType::Shape:
1833 {
1834 auto shapeQueueDescriptor = PolymorphicDowncast<const ShapeQueueDescriptor*>(&descriptor);
1835 return CreateShape(*shapeQueueDescriptor, info);
1836 }
1837 case LayerType::Slice:
1838 {
1839 auto sliceQueueDescriptor = PolymorphicDowncast<const SliceQueueDescriptor*>(&descriptor);
1840 return CreateSlice(*sliceQueueDescriptor, info);
1841 }
1842 case LayerType::Softmax:
1843 {
1844 auto softmaxQueueDescriptor = PolymorphicDowncast<const SoftmaxQueueDescriptor*>(&descriptor);
1845 return CreateSoftmax(*softmaxQueueDescriptor, info);
1846 }
1847 case LayerType::SpaceToBatchNd:
1848 {
1849 auto spaceToBatchNdQueueDescriptor
1850 = PolymorphicDowncast<const SpaceToBatchNdQueueDescriptor*>(&descriptor);
1851 return CreateSpaceToBatchNd(*spaceToBatchNdQueueDescriptor, info);
1852 }
1853 case LayerType::SpaceToDepth:
1854 {
1855 auto spaceToDepthQueueDescriptor = PolymorphicDowncast<const SpaceToDepthQueueDescriptor*>(&descriptor);
1856 return CreateSpaceToDepth(*spaceToDepthQueueDescriptor, info);
1857 }
1858 case LayerType::Splitter:
1859 {
1860 auto splitterQueueDescriptor = PolymorphicDowncast<const SplitterQueueDescriptor*>(&descriptor);
1861 return CreateSplitter(*splitterQueueDescriptor, info);
1862 }
1863 case LayerType::Stack:
1864 {
1865 auto stackQueueDescriptor = PolymorphicDowncast<const StackQueueDescriptor*>(&descriptor);
1866 return CreateStack(*stackQueueDescriptor, info);
1867 }
1868 case LayerType::StridedSlice:
1869 {
1870 auto stridedSliceQueueDescriptor = PolymorphicDowncast<const StridedSliceQueueDescriptor*>(&descriptor);
1871 return CreateStridedSlice(*stridedSliceQueueDescriptor, info);
1872 }
1873 case LayerType::Subtraction:
1874 {
1875 auto subtractionQueueDescriptor = PolymorphicDowncast<const SubtractionQueueDescriptor*>(&descriptor);
1876 return CreateSubtraction(*subtractionQueueDescriptor, info);
1877 }
1878 case LayerType::Transpose:
1879 {
1880 auto transposeQueueDescriptor = PolymorphicDowncast<const TransposeQueueDescriptor*>(&descriptor);
1881 return CreateTranspose(*transposeQueueDescriptor, info);
1882 }
1883 case LayerType::TransposeConvolution2d:
1884 {
1885 auto transposeConvolution2dQueueDescriptor
1886 = PolymorphicDowncast<const TransposeConvolution2dQueueDescriptor*>(&descriptor);
1887 return CreateTransposeConvolution2d(*transposeConvolution2dQueueDescriptor, info);
1888 }
1889 case LayerType::UnidirectionalSequenceLstm:
1890 {
1891 auto unidirectionalSequenceLstmQueueDescriptor
1892 = PolymorphicDowncast<const UnidirectionalSequenceLstmQueueDescriptor*>(&descriptor);
1893 return CreateUnidirectionalSequenceLstm(*unidirectionalSequenceLstmQueueDescriptor, info);
1894 }
1895 default:
1896 return nullptr;
1897 }
1898}
1899ARMNN_NO_DEPRECATE_WARN_END
Sadik Armagan04a72972020-09-14 15:44:18 +01001900
Derek Lamberti901ea112019-12-10 22:07:09 +00001901std::unique_ptr<IWorkload> IWorkloadFactory::CreateActivation(const ActivationQueueDescriptor& /*descriptor*/,
1902 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001903{
1904 return std::unique_ptr<IWorkload>();
1905}
1906
Derek Lamberti901ea112019-12-10 22:07:09 +00001907std::unique_ptr<IWorkload> IWorkloadFactory::CreateAddition(const AdditionQueueDescriptor& /*descriptor*/,
1908 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001909{
1910 return std::unique_ptr<IWorkload>();
1911}
1912
Derek Lamberti901ea112019-12-10 22:07:09 +00001913std::unique_ptr<IWorkload> IWorkloadFactory::CreateArgMinMax(const ArgMinMaxQueueDescriptor& /*descriptor*/,
1914 const WorkloadInfo& /*info*/) const
Nikhil Rajee391d52019-09-05 17:50:44 +01001915{
1916 return std::unique_ptr<IWorkload>();
1917}
1918
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001919std::unique_ptr<IWorkload> IWorkloadFactory::CreateBatchNormalization(
Derek Lamberti901ea112019-12-10 22:07:09 +00001920 const BatchNormalizationQueueDescriptor& /*descriptor*/, const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001921{
1922 return std::unique_ptr<IWorkload>();
1923}
1924
Derek Lamberti901ea112019-12-10 22:07:09 +00001925std::unique_ptr<IWorkload> IWorkloadFactory::CreateBatchToSpaceNd(const BatchToSpaceNdQueueDescriptor& /*desc*/,
1926 const WorkloadInfo& /*Info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001927{
1928 return std::unique_ptr<IWorkload>();
1929}
1930
mathad01b392e982021-04-07 12:07:30 +01001931std::unique_ptr<IWorkload> IWorkloadFactory::CreateCast(const CastQueueDescriptor& /*descriptor*/,
1932 const WorkloadInfo& /*info*/) const
1933{
1934 return std::unique_ptr<IWorkload>();
1935}
1936
Simon Obute51f67772021-09-03 15:50:13 +01001937std::unique_ptr<IWorkload> IWorkloadFactory::CreateChannelShuffle(const ChannelShuffleQueueDescriptor& /*descriptor*/,
1938 const WorkloadInfo& /*info*/) const
1939{
1940 return std::unique_ptr<IWorkload>();
1941}
1942
Derek Lamberti901ea112019-12-10 22:07:09 +00001943std::unique_ptr<IWorkload> IWorkloadFactory::CreateComparison(const ComparisonQueueDescriptor& /*descriptor*/,
1944 const WorkloadInfo& /*info*/) const
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001945{
1946 return std::unique_ptr<IWorkload>();
1947}
1948
Derek Lamberti901ea112019-12-10 22:07:09 +00001949std::unique_ptr<IWorkload> IWorkloadFactory::CreateConcat(const ConcatQueueDescriptor& /*descriptor*/,
1950 const WorkloadInfo& /*info*/) const
Jim Flynn4ed6c832019-05-20 11:02:46 +01001951{
1952 return std::unique_ptr<IWorkload>();
1953}
1954
Derek Lamberti901ea112019-12-10 22:07:09 +00001955std::unique_ptr<IWorkload> IWorkloadFactory::CreateConstant(const ConstantQueueDescriptor& /*descriptor*/,
1956 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001957{
1958 return std::unique_ptr<IWorkload>();
1959}
1960
Narumol Prangnawarat7ddbbae2020-03-13 10:26:05 +00001961std::unique_ptr<IWorkload> IWorkloadFactory::CreateConvertBf16ToFp32(const ConvertBf16ToFp32QueueDescriptor& /*desc*/,
1962 const WorkloadInfo& /*info*/) const
1963{
1964 return std::unique_ptr<IWorkload>();
1965}
1966
Derek Lamberti901ea112019-12-10 22:07:09 +00001967std::unique_ptr<IWorkload> IWorkloadFactory::CreateConvertFp16ToFp32(const ConvertFp16ToFp32QueueDescriptor& /*desc*/,
1968 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001969{
1970 return std::unique_ptr<IWorkload>();
1971}
1972
Narumol Prangnawaratea54a012020-03-16 16:36:10 +00001973std::unique_ptr<IWorkload> IWorkloadFactory::CreateConvertFp32ToBf16(const ConvertFp32ToBf16QueueDescriptor& /*desc*/,
1974 const WorkloadInfo& /*info*/) const
1975{
1976 return std::unique_ptr<IWorkload>();
1977}
1978
Derek Lamberti901ea112019-12-10 22:07:09 +00001979std::unique_ptr<IWorkload> IWorkloadFactory::CreateConvertFp32ToFp16(const ConvertFp32ToFp16QueueDescriptor& /*desc*/,
1980 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001981{
1982 return std::unique_ptr<IWorkload>();
1983}
1984
Derek Lamberti901ea112019-12-10 22:07:09 +00001985std::unique_ptr<IWorkload> IWorkloadFactory::CreateConvolution2d(const Convolution2dQueueDescriptor& /*descriptor*/,
1986 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001987{
1988 return std::unique_ptr<IWorkload>();
1989}
1990
Matthew Sloyanb63a3112021-09-08 13:05:51 +01001991std::unique_ptr<IWorkload> IWorkloadFactory::CreateConvolution3d(const Convolution3dQueueDescriptor& /*descriptor*/,
1992 const WorkloadInfo& /*info*/) const
1993{
1994 return std::unique_ptr<IWorkload>();
1995}
1996
Derek Lamberti901ea112019-12-10 22:07:09 +00001997std::unique_ptr<IWorkload> IWorkloadFactory::CreateDebug(const DebugQueueDescriptor& /*descriptor*/,
1998 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001999{
2000 return std::unique_ptr<IWorkload>();
2001}
2002
Derek Lamberti901ea112019-12-10 22:07:09 +00002003std::unique_ptr<IWorkload> IWorkloadFactory::CreateDepthToSpace(const DepthToSpaceQueueDescriptor& /*descriptor*/,
2004 const WorkloadInfo& /*info*/) const
Aron Virginas-Tardd6247f2019-09-19 14:31:17 +01002005{
2006 return std::unique_ptr<IWorkload>();
2007}
2008
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002009std::unique_ptr<IWorkload> IWorkloadFactory::CreateDepthwiseConvolution2d(
Derek Lamberti901ea112019-12-10 22:07:09 +00002010 const DepthwiseConvolution2dQueueDescriptor& /*descriptor*/, const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002011{
2012 return std::unique_ptr<IWorkload>();
2013}
2014
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00002015std::unique_ptr<IWorkload> IWorkloadFactory::CreateDequantize(
Derek Lamberti901ea112019-12-10 22:07:09 +00002016 const DequantizeQueueDescriptor& /*descriptor*/, const WorkloadInfo& /*info*/) const
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00002017{
2018 return std::unique_ptr<IWorkload>();
2019}
2020
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002021std::unique_ptr<IWorkload> IWorkloadFactory::CreateDetectionPostProcess(
Derek Lamberti901ea112019-12-10 22:07:09 +00002022 const DetectionPostProcessQueueDescriptor& /*descriptor*/, const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002023{
2024 return std::unique_ptr<IWorkload>();
2025}
2026
Derek Lamberti901ea112019-12-10 22:07:09 +00002027std::unique_ptr<IWorkload> IWorkloadFactory::CreateDivision(const DivisionQueueDescriptor& /*descriptor*/,
2028 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002029{
2030 return std::unique_ptr<IWorkload>();
2031}
2032
josh minor4a3c6102020-01-06 16:40:46 -06002033std::unique_ptr<IWorkload> IWorkloadFactory::CreateElementwiseUnary(const ElementwiseUnaryQueueDescriptor& /*desc*/,
2034 const WorkloadInfo& /*info*/) const
2035{
2036 return std::unique_ptr<IWorkload>();
2037}
2038
Derek Lamberti901ea112019-12-10 22:07:09 +00002039std::unique_ptr<IWorkload> IWorkloadFactory::CreateFakeQuantization(const FakeQuantizationQueueDescriptor& /*desc*/,
2040 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002041{
2042 return std::unique_ptr<IWorkload>();
2043}
2044
Ryan OSheaec6c6802020-06-05 17:17:06 +01002045std::unique_ptr<IWorkload> IWorkloadFactory::CreateFill(const FillQueueDescriptor& /*descriptor*/,
2046 const WorkloadInfo& /*info*/) const
2047{
2048 return std::unique_ptr<IWorkload>();
2049}
2050
Derek Lamberti901ea112019-12-10 22:07:09 +00002051std::unique_ptr<IWorkload> IWorkloadFactory::CreateFloor(const FloorQueueDescriptor& /*descriptor*/,
2052 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002053{
2054 return std::unique_ptr<IWorkload>();
2055}
2056
Derek Lamberti901ea112019-12-10 22:07:09 +00002057std::unique_ptr<IWorkload> IWorkloadFactory::CreateFullyConnected(const FullyConnectedQueueDescriptor& /*descriptor*/,
2058 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002059{
2060 return std::unique_ptr<IWorkload>();
2061}
2062
Derek Lamberti901ea112019-12-10 22:07:09 +00002063std::unique_ptr<IWorkload> IWorkloadFactory::CreateGather(const GatherQueueDescriptor& /*descriptor*/,
2064 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002065{
2066 return std::unique_ptr<IWorkload>();
2067}
2068
Kevin Mayce5045a2019-10-02 14:07:47 +01002069std::unique_ptr<IWorkload> IWorkloadFactory::CreateInstanceNormalization(
Derek Lamberti901ea112019-12-10 22:07:09 +00002070 const InstanceNormalizationQueueDescriptor& /*descriptor*/,
2071 const WorkloadInfo& /*info*/) const
Kevin Mayce5045a2019-10-02 14:07:47 +01002072{
2073 return std::unique_ptr<IWorkload>();
2074}
2075
Derek Lamberti901ea112019-12-10 22:07:09 +00002076std::unique_ptr<IWorkload> IWorkloadFactory::CreateL2Normalization(const L2NormalizationQueueDescriptor& /*desc*/,
2077 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002078{
2079 return std::unique_ptr<IWorkload>();
2080}
2081
James Conroyaba90cd2020-11-06 16:28:18 +00002082std::unique_ptr<IWorkload> IWorkloadFactory::CreateLogicalBinary(const LogicalBinaryQueueDescriptor& /*desc*/,
2083 const WorkloadInfo& /*info*/) const
2084{
2085 return std::unique_ptr<IWorkload>();
2086}
2087
2088std::unique_ptr<IWorkload> IWorkloadFactory::CreateLogicalUnary(const ElementwiseUnaryQueueDescriptor& /*desc*/,
2089 const WorkloadInfo& /*info*/) const
2090{
2091 return std::unique_ptr<IWorkload>();
2092}
2093
Derek Lamberti901ea112019-12-10 22:07:09 +00002094std::unique_ptr<IWorkload> IWorkloadFactory::CreateLogSoftmax(const LogSoftmaxQueueDescriptor& /*descriptor*/,
2095 const WorkloadInfo& /*info*/) const
Aron Virginas-Tarf982dea2019-10-11 14:07:53 +01002096{
2097 return std::unique_ptr<IWorkload>();
2098}
2099
Derek Lamberti901ea112019-12-10 22:07:09 +00002100std::unique_ptr<IWorkload> IWorkloadFactory::CreateLstm(const LstmQueueDescriptor& /*descriptor*/,
2101 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002102{
2103 return std::unique_ptr<IWorkload>();
2104}
2105
Derek Lamberti901ea112019-12-10 22:07:09 +00002106std::unique_ptr<IWorkload> IWorkloadFactory::CreateMaximum(const MaximumQueueDescriptor& /*descriptor*/,
2107 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002108{
2109 return std::unique_ptr<IWorkload>();
2110}
2111
Derek Lamberti901ea112019-12-10 22:07:09 +00002112std::unique_ptr<IWorkload> IWorkloadFactory::CreateMean(const MeanQueueDescriptor& /*descriptor*/,
2113 const WorkloadInfo& /*Info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002114{
2115 return std::unique_ptr<IWorkload>();
2116}
2117
Derek Lamberti901ea112019-12-10 22:07:09 +00002118std::unique_ptr<IWorkload> IWorkloadFactory::CreateMemCopy(const MemCopyQueueDescriptor& /*descriptor*/,
2119 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002120{
2121 return std::unique_ptr<IWorkload>();
2122}
2123
Derek Lamberti901ea112019-12-10 22:07:09 +00002124std::unique_ptr<IWorkload> IWorkloadFactory::CreateMemImport(const MemImportQueueDescriptor& /*descriptor*/,
2125 const WorkloadInfo& /*info*/) const
Derek Lambertif674aa02019-08-01 15:56:25 +01002126{
2127 return std::unique_ptr<IWorkload>();
2128}
2129
Derek Lamberti901ea112019-12-10 22:07:09 +00002130std::unique_ptr<IWorkload> IWorkloadFactory::CreateMerge(const MergeQueueDescriptor& /*descriptor*/,
2131 const WorkloadInfo& /*info*/) const
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01002132{
2133 return std::unique_ptr<IWorkload>();
2134}
2135
Derek Lamberti901ea112019-12-10 22:07:09 +00002136std::unique_ptr<IWorkload> IWorkloadFactory::CreateMinimum(const MinimumQueueDescriptor& /*descriptor*/,
2137 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002138{
2139 return std::unique_ptr<IWorkload>();
2140}
2141
Derek Lamberti901ea112019-12-10 22:07:09 +00002142std::unique_ptr<IWorkload> IWorkloadFactory::CreateMultiplication(const MultiplicationQueueDescriptor& /*descriptor*/,
2143 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002144{
2145 return std::unique_ptr<IWorkload>();
2146}
2147
Derek Lamberti901ea112019-12-10 22:07:09 +00002148std::unique_ptr<IWorkload> IWorkloadFactory::CreateNormalization(const NormalizationQueueDescriptor& /*descriptor*/,
2149 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002150{
2151 return std::unique_ptr<IWorkload>();
2152}
2153
Derek Lamberti901ea112019-12-10 22:07:09 +00002154std::unique_ptr<IWorkload> IWorkloadFactory::CreateOutput(const OutputQueueDescriptor& /*descriptor*/,
2155 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002156{
2157 return std::unique_ptr<IWorkload>();
2158}
2159
Derek Lamberti901ea112019-12-10 22:07:09 +00002160std::unique_ptr<IWorkload> IWorkloadFactory::CreatePad(const PadQueueDescriptor& /*descriptor*/,
2161 const WorkloadInfo& /*Info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002162{
2163 return std::unique_ptr<IWorkload>();
2164}
2165
Derek Lamberti901ea112019-12-10 22:07:09 +00002166std::unique_ptr<IWorkload> IWorkloadFactory::CreatePermute(const PermuteQueueDescriptor& /*descriptor*/,
Mike Kellyc9ea45a2020-02-28 18:11:58 +00002167 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002168{
2169 return std::unique_ptr<IWorkload>();
2170}
2171
Derek Lamberti901ea112019-12-10 22:07:09 +00002172std::unique_ptr<IWorkload> IWorkloadFactory::CreatePooling2d(const Pooling2dQueueDescriptor& /*descriptor*/,
2173 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002174{
2175 return std::unique_ptr<IWorkload>();
2176}
2177
Tamás Nyíri7b885b32021-10-26 14:47:57 +01002178std::unique_ptr<IWorkload> IWorkloadFactory::CreatePooling3d(const Pooling3dQueueDescriptor& /*descriptor*/,
2179 const WorkloadInfo& /*info*/) const
2180{
2181 return std::unique_ptr<IWorkload>();
2182}
2183
Derek Lamberti901ea112019-12-10 22:07:09 +00002184std::unique_ptr<IWorkload> IWorkloadFactory::CreatePreCompiled(const PreCompiledQueueDescriptor& /*descriptor*/,
2185 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002186{
2187 return std::unique_ptr<IWorkload>();
2188}
2189
Derek Lamberti901ea112019-12-10 22:07:09 +00002190std::unique_ptr<IWorkload> IWorkloadFactory::CreatePrelu(const PreluQueueDescriptor &/*descriptor*/,
2191 const WorkloadInfo &/*info*/) const
Matteo Martincigh0e406ee2019-06-12 15:42:18 +01002192{
2193 return std::unique_ptr<IWorkload>();
2194}
2195
Derek Lamberti901ea112019-12-10 22:07:09 +00002196std::unique_ptr<IWorkload> IWorkloadFactory::CreateQuantize(const QuantizeQueueDescriptor& /*descriptor*/,
2197 const WorkloadInfo& /*Info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002198{
2199 return std::unique_ptr<IWorkload>();
2200}
2201
James Conroy586a9aa2020-03-20 08:49:33 +00002202std::unique_ptr<IWorkload> IWorkloadFactory::CreateQLstm(const QLstmQueueDescriptor& /*descriptor*/,
2203 const WorkloadInfo& /*info*/) const
2204{
2205 return std::unique_ptr<IWorkload>();
2206}
2207
Derek Lamberti901ea112019-12-10 22:07:09 +00002208std::unique_ptr<IWorkload> IWorkloadFactory::CreateQuantizedLstm(const QuantizedLstmQueueDescriptor& /*descriptor*/,
2209 const WorkloadInfo& /*info*/) const
James Conroyee18dc82019-07-17 11:27:46 +01002210{
2211 return std::unique_ptr<IWorkload>();
2212}
Finn Williams2605b232020-06-10 15:53:46 +01002213std::unique_ptr<IWorkload> IWorkloadFactory::CreateRank(const RankQueueDescriptor& /*descriptor*/,
2214 const WorkloadInfo& /*info*/) const
2215{
2216 return std::unique_ptr<IWorkload>();
2217}
James Conroyee18dc82019-07-17 11:27:46 +01002218
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +00002219std::unique_ptr<IWorkload> IWorkloadFactory::CreateReduce(const ReduceQueueDescriptor& /*descriptor*/,
2220 const WorkloadInfo& /*info*/) const
2221{
2222 return std::unique_ptr<IWorkload>();
2223}
2224
Derek Lamberti901ea112019-12-10 22:07:09 +00002225std::unique_ptr<IWorkload> IWorkloadFactory::CreateReshape(const ReshapeQueueDescriptor& /*descriptor*/,
2226 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002227{
2228 return std::unique_ptr<IWorkload>();
2229}
2230
Derek Lamberti901ea112019-12-10 22:07:09 +00002231std::unique_ptr<IWorkload> IWorkloadFactory::CreateResize(const ResizeQueueDescriptor& /*descriptor*/,
2232 const WorkloadInfo& /*info*/) const
Teresa Charlina9075df2019-06-27 15:41:57 +01002233{
2234 return std::unique_ptr<IWorkload>();
2235}
2236
Keith Davis3ae3f972021-05-21 16:33:48 +01002237std::unique_ptr<IWorkload> IWorkloadFactory::CreateShape(const ShapeQueueDescriptor& /*descriptor*/,
2238 const WorkloadInfo& /*info*/) const
2239{
2240 return std::unique_ptr<IWorkload>();
2241}
2242
Derek Lamberti901ea112019-12-10 22:07:09 +00002243std::unique_ptr<IWorkload> IWorkloadFactory::CreateSlice(const SliceQueueDescriptor& /*descriptor*/,
2244 const WorkloadInfo& /*info*/) const
2245{
2246 return std::unique_ptr<IWorkload>();
2247}
Mike Kellyc9ea45a2020-02-28 18:11:58 +00002248
Derek Lamberti901ea112019-12-10 22:07:09 +00002249std::unique_ptr<IWorkload> IWorkloadFactory::CreateSoftmax(const SoftmaxQueueDescriptor& /*descriptor*/,
2250 const WorkloadInfo& /*info*/) const
Aron Virginas-Tar636ab402019-09-16 14:27:45 +01002251{
2252 return std::unique_ptr<IWorkload>();
2253}
2254
Derek Lamberti901ea112019-12-10 22:07:09 +00002255std::unique_ptr<IWorkload> IWorkloadFactory::CreateSplitter(const SplitterQueueDescriptor& /*descriptor*/,
2256 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002257{
2258 return std::unique_ptr<IWorkload>();
2259}
2260
Derek Lamberti901ea112019-12-10 22:07:09 +00002261std::unique_ptr<IWorkload> IWorkloadFactory::CreateSpaceToBatchNd(const SpaceToBatchNdQueueDescriptor& /*descriptor*/,
2262 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002263{
2264 return std::unique_ptr<IWorkload>();
2265}
2266
Derek Lamberti901ea112019-12-10 22:07:09 +00002267std::unique_ptr<IWorkload> IWorkloadFactory::CreateSpaceToDepth(const SpaceToDepthQueueDescriptor& /*descriptor*/,
2268 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002269{
2270 return std::unique_ptr<IWorkload>();
2271}
2272
Derek Lamberti901ea112019-12-10 22:07:09 +00002273std::unique_ptr<IWorkload> IWorkloadFactory::CreateStack(const StackQueueDescriptor& /*descriptor*/,
2274 const WorkloadInfo& /*info*/) const
Aron Virginas-Tar972af152019-06-11 14:14:03 +01002275{
2276 return std::unique_ptr<IWorkload>();
2277}
2278
Derek Lamberti901ea112019-12-10 22:07:09 +00002279std::unique_ptr<IWorkload> IWorkloadFactory::CreateStridedSlice(const StridedSliceQueueDescriptor& /*descriptor*/,
2280 const WorkloadInfo& /*info*/) const
Matthew Jackson2b8c1da2019-07-04 14:59:16 +01002281{
2282 return std::unique_ptr<IWorkload>();
2283}
2284
Derek Lamberti901ea112019-12-10 22:07:09 +00002285std::unique_ptr<IWorkload> IWorkloadFactory::CreateSubtraction(const SubtractionQueueDescriptor& /*descriptor*/,
2286 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002287{
2288 return std::unique_ptr<IWorkload>();
2289}
2290
Derek Lamberti901ea112019-12-10 22:07:09 +00002291std::unique_ptr<IWorkload> IWorkloadFactory::CreateSwitch(const SwitchQueueDescriptor& /*descriptor*/,
2292 const WorkloadInfo& /*info*/) const
Sadik Armaganeff363d2019-04-05 15:25:46 +01002293{
2294 return std::unique_ptr<IWorkload>();
2295}
2296
Mike Kellyc9ea45a2020-02-28 18:11:58 +00002297std::unique_ptr<IWorkload> IWorkloadFactory::CreateTranspose(const TransposeQueueDescriptor& /*descriptor*/,
2298 const WorkloadInfo& /*info*/) const
2299{
2300 return std::unique_ptr<IWorkload>();
2301}
2302
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01002303std::unique_ptr<IWorkload> IWorkloadFactory::CreateTransposeConvolution2d(
Derek Lamberti901ea112019-12-10 22:07:09 +00002304 const TransposeConvolution2dQueueDescriptor& /*descriptor*/,
2305 const WorkloadInfo& /*info*/) const
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01002306{
2307 return std::unique_ptr<IWorkload>();
surmeh013537c2c2018-05-18 16:31:43 +01002308}
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01002309
Narumol Prangnawarat8ed39ae2021-07-15 16:16:25 +01002310std::unique_ptr<IWorkload> IWorkloadFactory::CreateUnidirectionalSequenceLstm(
2311 const UnidirectionalSequenceLstmQueueDescriptor& /*descriptor*/,
2312 const WorkloadInfo& /*info*/) const
2313{
2314 return std::unique_ptr<IWorkload>();
2315}
2316
Aron Virginas-Tar636ab402019-09-16 14:27:45 +01002317} // namepsace armnn