blob: 1283f676601290ff26e0ee421f72b1c7da495b19 [file] [log] [blame]
Laurent Carlier749294b2020-06-01 09:03:17 +01001//
Teresa Charlin52664732020-06-29 16:27:03 +01002// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
David Beckecb56cd2018-09-05 12:52:57 +01003// SPDX-License-Identifier: MIT
telsoa014fcda012018-03-09 14:13:49 +00004//
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +00005
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +00006#include <Layer.hpp>
7#include <LayersFwd.hpp>
David Beckdcb751f2018-10-03 11:42:42 +01008
David Beckb4540be2018-09-24 13:18:27 +01009#include <armnn/Types.hpp>
Sadik Armagana097d2a2021-11-24 15:47:28 +000010#include <armnn/backends/IBackendInternal.hpp>
Francis Murtaghcae45682021-04-26 10:07:49 +010011#include <armnn/backends/ILayerSupport.hpp>
Sadik Armaganf0a6dec2021-03-25 07:46:55 +000012#include <armnn/BackendHelper.hpp>
Matteo Martincighc601aa62019-10-29 15:03:22 +000013#include <armnn/BackendRegistry.hpp>
Jan Eilersbb446e52020-04-02 13:56:54 +010014#include <armnn/utility/PolymorphicDowncast.hpp>
Finn Williams3e54d032020-10-22 16:53:35 +010015#include <armnn/utility/TransformIterator.hpp>
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +000016
Colm Donelan0c479742021-12-10 12:43:54 +000017#include <armnn/backends/WorkloadFactory.hpp>
18#include <armnn/backends/TensorHandle.hpp>
telsoa014fcda012018-03-09 14:13:49 +000019
David Beck111b5d92018-11-12 14:59:37 +000020#include <sstream>
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +000021
telsoa014fcda012018-03-09 14:13:49 +000022namespace armnn
23{
24
telsoa01c577f2c2018-08-31 09:22:23 +010025namespace
26{
Finn Williams3e54d032020-10-22 16:53:35 +010027using LayerList = std::list<Layer*>;
28using Iterator = LayerList::const_iterator; // Const so pointers in the list can't be modified externally.
telsoa01c577f2c2018-08-31 09:22:23 +010029
David Beck29c75de2018-10-23 13:35:58 +010030const TensorInfo OverrideDataType(const TensorInfo& info, Optional<DataType> type)
31{
32 if (!type)
33 {
34 return info;
telsoa01c577f2c2018-08-31 09:22:23 +010035 }
36
Matthew Sloyan81beae32021-07-13 19:46:11 +010037 return TensorInfo(info.GetShape(),
38 type.value(),
39 info.GetQuantizationScale(),
40 info.GetQuantizationOffset(),
41 info.IsConstant());
telsoa01c577f2c2018-08-31 09:22:23 +010042}
43
David Beck29c75de2018-10-23 13:35:58 +010044} // anonymous namespace
45
Sadik Armagana097d2a2021-11-24 15:47:28 +000046inline armnn::Optional<armnn::DataType> GetBiasTypeFromWeightsType(armnn::Optional<armnn::DataType> weightsType)
47{
48 if (!weightsType)
49 {
50 return weightsType;
51 }
52
53 switch(weightsType.value())
54 {
55 case armnn::DataType::BFloat16:
56 case armnn::DataType::Float16:
57 case armnn::DataType::Float32:
58 return weightsType;
59 case armnn::DataType::QAsymmS8:
60 case armnn::DataType::QAsymmU8:
61 case armnn::DataType::QSymmS8:
62 case armnn::DataType::QSymmS16:
63 return armnn::DataType::Signed32;
64 default:
65 ARMNN_ASSERT_MSG(false, "GetBiasTypeFromWeightsType(): Unsupported data type.");
66 }
67 return armnn::EmptyOptional();
68}
69
70
Sadik Armagan045f6be2020-09-10 13:37:32 +010071bool IWorkloadFactory::IsLayerConfigurationSupported(const BackendId& backendId,
72 const IConnectableLayer& connectableLayer,
73 Optional<DataType> dataType,
74 std::string& outReasonIfUnsupported,
75 const ModelOptions& modelOptions)
telsoa014fcda012018-03-09 14:13:49 +000076{
David Beck33f0ae02018-10-18 15:13:56 +010077 Optional<std::string&> reason = outReasonIfUnsupported;
telsoa014fcda012018-03-09 14:13:49 +000078 bool result;
Jan Eilersbb446e52020-04-02 13:56:54 +010079 const Layer& layer = *(PolymorphicDowncast<const Layer*>(&connectableLayer));
David Beckdcb751f2018-10-03 11:42:42 +010080
David Beck111b5d92018-11-12 14:59:37 +000081 auto const& backendRegistry = BackendRegistryInstance();
82 if (!backendRegistry.IsBackendRegistered(backendId))
83 {
84 std::stringstream ss;
85 ss << connectableLayer.GetName() << " is not supported on " << backendId
86 << " because this backend is not registered.";
87
88 outReasonIfUnsupported = ss.str();
89 return false;
90 }
91
92 auto backendFactory = backendRegistry.GetFactory(backendId);
93 auto backendObject = backendFactory();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +000094 auto layerSupportObject = LayerSupportHandle(backendObject->GetLayerSupport(modelOptions), backendId);
David Beck33f0ae02018-10-18 15:13:56 +010095
telsoa014fcda012018-03-09 14:13:49 +000096 switch(layer.GetType())
97 {
98 case LayerType::Activation:
99 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100100 auto cLayer = PolymorphicDowncast<const ActivationLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000101 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100102 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000103 result = layerSupportObject.IsActivationSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100104 OverrideDataType(input, dataType),
105 OverrideDataType(output, dataType),
106 cLayer->GetParameters(),
David Beck33f0ae02018-10-18 15:13:56 +0100107 reason);
telsoa014fcda012018-03-09 14:13:49 +0000108 break;
109 }
110 case LayerType::Addition:
111 {
112 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
113 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
114 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000115 result = layerSupportObject.IsAdditionSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100116 OverrideDataType(input0, dataType),
117 OverrideDataType(input1, dataType),
118 OverrideDataType(output, dataType),
David Beck33f0ae02018-10-18 15:13:56 +0100119 reason);
telsoa014fcda012018-03-09 14:13:49 +0000120 break;
121 }
Nikhil Rajee391d52019-09-05 17:50:44 +0100122 case LayerType::ArgMinMax:
123 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100124 auto cLayer = PolymorphicDowncast<const ArgMinMaxLayer*>(&layer);
Nikhil Rajee391d52019-09-05 17:50:44 +0100125 const ArgMinMaxDescriptor& descriptor = cLayer->GetParameters();
126
127 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
128 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000129 result = layerSupportObject.IsArgMinMaxSupported(
Nikhil Rajee391d52019-09-05 17:50:44 +0100130 OverrideDataType(input, dataType),
Narumol Prangnawaratd1f57732019-10-31 14:24:02 +0000131 OverrideDataType(output, DataType::Signed32),
Nikhil Rajee391d52019-09-05 17:50:44 +0100132 descriptor,
133 reason);
134 break;
135 }
Samuel Yap6b478092022-07-06 15:36:03 +0100136 case LayerType::BatchMatMul:
137 {
138 auto cLayer = PolymorphicDowncast<const BatchMatMulLayer*>(&layer);
139 const BatchMatMulDescriptor& descriptor = cLayer->GetParameters();
140
141 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
142 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
143 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
144 result = layerSupportObject.IsBatchMatMulSupported(
145 OverrideDataType(input0, dataType),
146 OverrideDataType(input1, dataType),
147 OverrideDataType(output, dataType),
148 descriptor,
149 reason);
150 break;
151 }
telsoa014fcda012018-03-09 14:13:49 +0000152 case LayerType::BatchNormalization:
153 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100154 auto cLayer = PolymorphicDowncast<const BatchNormalizationLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000155 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100156 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
157 const TensorInfo& mean = cLayer->m_Mean->GetTensorInfo();
158 const TensorInfo& var = cLayer->m_Variance->GetTensorInfo();
159 const TensorInfo& beta = cLayer->m_Beta->GetTensorInfo();
160 const TensorInfo& gamma = cLayer->m_Gamma->GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000161 result = layerSupportObject.IsBatchNormalizationSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100162 OverrideDataType(input, dataType),
163 OverrideDataType(output, dataType),
164 OverrideDataType(mean, dataType),
165 OverrideDataType(var, dataType),
166 OverrideDataType(beta, dataType),
167 OverrideDataType(gamma, dataType),
168 cLayer->GetParameters(),
David Beck33f0ae02018-10-18 15:13:56 +0100169 reason);
telsoa014fcda012018-03-09 14:13:49 +0000170 break;
171 }
Éanna Ó Catháin4e1e1362018-11-12 11:36:34 +0000172 case LayerType::BatchToSpaceNd:
173 {
174 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
175 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Jan Eilersbb446e52020-04-02 13:56:54 +0100176 auto cLayer = PolymorphicDowncast<const BatchToSpaceNdLayer*>(&layer);
Éanna Ó Catháin4e1e1362018-11-12 11:36:34 +0000177
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000178 result = layerSupportObject.IsBatchToSpaceNdSupported(OverrideDataType(input, dataType),
179 OverrideDataType(output, dataType),
180 cLayer->GetParameters(),
181 reason);
Éanna Ó Catháin4e1e1362018-11-12 11:36:34 +0000182 break;
183 }
mathad01b392e982021-04-07 12:07:30 +0100184 case LayerType::Cast:
185 {
186 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
187 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
188
189 result = layerSupportObject.IsCastSupported(OverrideDataType(input, dataType),
190 OverrideDataType(output, dataType),
191 reason);
192 break;
193 }
Simon Obute51f67772021-09-03 15:50:13 +0100194 case LayerType::ChannelShuffle:
195 {
196 auto cLayer = PolymorphicDowncast<const ChannelShuffleLayer*>(&layer);
197
198 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
199 const TensorInfo& output = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
200
201 const ChannelShuffleDescriptor descriptor = cLayer->GetParameters();
202
203 result = layerSupportObject.IsChannelShuffleSupported(OverrideDataType(input, dataType),
204 OverrideDataType(output, dataType),
205 descriptor,
206 reason);
207 break;
208 }
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +0100209 case LayerType::Comparison:
210 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100211 auto cLayer = PolymorphicDowncast<const ComparisonLayer*>(&layer);
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +0100212
213 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
214 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
215 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
216
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000217 result = layerSupportObject.IsComparisonSupported(OverrideDataType(input0, dataType),
218 OverrideDataType(input1, dataType),
219 OverrideDataType(output, DataType::Boolean),
220 cLayer->GetParameters(),
221 reason);
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +0100222 break;
223 }
telsoa014fcda012018-03-09 14:13:49 +0000224 case LayerType::Constant:
225 {
226 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000227 result = layerSupportObject.IsConstantSupported(OverrideDataType(output, dataType), reason);
telsoa01c577f2c2018-08-31 09:22:23 +0100228 break;
229 }
230 case LayerType::ConvertFp16ToFp32:
231 {
232 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
233 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000234 result = layerSupportObject.IsConvertFp16ToFp32Supported(input, output, reason);
telsoa01c577f2c2018-08-31 09:22:23 +0100235 break;
236 }
237 case LayerType::ConvertFp32ToFp16:
238 {
239 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
240 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000241 result = layerSupportObject.IsConvertFp32ToFp16Supported(input, output, reason);
telsoa014fcda012018-03-09 14:13:49 +0000242 break;
243 }
244 case LayerType::Convolution2d:
245 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100246 auto cLayer = PolymorphicDowncast<const Convolution2dLayer*>(&layer);
arovir01a6824102018-08-28 17:40:45 +0100247
248 const TensorInfo input = OverrideDataType(layer.GetInputSlot(0).GetConnection()->GetTensorInfo(),
249 dataType);
telsoa01c577f2c2018-08-31 09:22:23 +0100250 const TensorInfo output = OverrideDataType(layer.GetOutputSlot(0).GetTensorInfo(), dataType);
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100251 ARMNN_ASSERT_MSG(layer.GetInputSlot(1).GetConnection(),
252 "Convolution2dLayer: Weights should be connected as a Constant Layer.");
253 const TensorInfo weights = OverrideDataType(layer.GetInputSlot(1).GetConnection()->GetTensorInfo(),
254 dataType);
surmeh013537c2c2018-05-18 16:31:43 +0100255
Matthew Sloyanb63a3112021-09-08 13:05:51 +0100256 const Convolution2dDescriptor& descriptor = cLayer->GetParameters();
surmeh013537c2c2018-05-18 16:31:43 +0100257
arovir01a6824102018-08-28 17:40:45 +0100258 // Construct optional biases object based on the value of m_BiasEnabled
David Beck5eec11d2018-10-04 15:43:17 +0100259 Optional<TensorInfo> biases;
surmeh013537c2c2018-05-18 16:31:43 +0100260 if (descriptor.m_BiasEnabled)
261 {
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100262 ARMNN_ASSERT_MSG(layer.GetInputSlot(2).GetConnection(),
263 "Convolution2dLayer: Bias should be connected as a Constant Layer.");
264 biases = OverrideDataType(layer.GetInputSlot(2).GetConnection()->GetTensorInfo(),
265 GetBiasTypeFromWeightsType(dataType));
surmeh013537c2c2018-05-18 16:31:43 +0100266 }
267
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000268 result = layerSupportObject.IsConvolution2dSupported(
surmeh013537c2c2018-05-18 16:31:43 +0100269 input,
270 output,
271 descriptor,
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100272 weights,
arovir01a6824102018-08-28 17:40:45 +0100273 biases,
David Beck33f0ae02018-10-18 15:13:56 +0100274 reason);
telsoa014fcda012018-03-09 14:13:49 +0000275 break;
276 }
Matthew Sloyanb63a3112021-09-08 13:05:51 +0100277 case LayerType::Convolution3d:
278 {
279 auto cLayer = PolymorphicDowncast<const Convolution3dLayer*>(&layer);
280
281 const TensorInfo input = OverrideDataType(layer.GetInputSlot(0).GetConnection()->GetTensorInfo(),
282 dataType);
283 const TensorInfo output = OverrideDataType(layer.GetOutputSlot(0).GetTensorInfo(), dataType);
Matthew Sloyan5d7b0a32021-10-18 13:07:49 +0100284
285 ARMNN_ASSERT_MSG(layer.GetInputSlot(1).GetConnection(),
286 "Convolution3dLayer: Weights should be connected as a Constant Layer.");
287 const TensorInfo weights = OverrideDataType(layer.GetInputSlot(1).GetConnection()->GetTensorInfo(),
288 dataType);
Matthew Sloyanb63a3112021-09-08 13:05:51 +0100289
290 const Convolution3dDescriptor& descriptor = cLayer->GetParameters();
291
292 // Construct optional biases object based on the value of m_BiasEnabled
293 Optional<TensorInfo> biases;
294 if (descriptor.m_BiasEnabled)
295 {
Matthew Sloyan5d7b0a32021-10-18 13:07:49 +0100296 biases = OverrideDataType(layer.GetInputSlot(2).GetConnection()->GetTensorInfo(),
297 GetBiasTypeFromWeightsType(dataType));
Matthew Sloyanb63a3112021-09-08 13:05:51 +0100298 }
299
300 result = layerSupportObject.IsConvolution3dSupported(
301 input,
302 output,
303 descriptor,
Matthew Sloyan5d7b0a32021-10-18 13:07:49 +0100304 weights,
Matthew Sloyanb63a3112021-09-08 13:05:51 +0100305 biases,
306 reason);
307 break;
308 }
Nattapat Chaimanowonga9a1cf12018-12-03 16:06:49 +0000309 case LayerType::Debug:
310 {
311 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
312 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
313
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000314 result = layerSupportObject.IsDebugSupported(OverrideDataType(input, dataType),
Nattapat Chaimanowonga9a1cf12018-12-03 16:06:49 +0000315 OverrideDataType(output, dataType),
316 reason);
317 break;
318 }
Aron Virginas-Tardd6247f2019-09-19 14:31:17 +0100319 case LayerType::DepthToSpace:
320 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100321 auto cLayer = PolymorphicDowncast<const DepthToSpaceLayer*>(&layer);
Aron Virginas-Tardd6247f2019-09-19 14:31:17 +0100322
323 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
324 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
325
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000326 result = layerSupportObject.IsDepthToSpaceSupported(OverrideDataType(input, dataType),
Aron Virginas-Tardd6247f2019-09-19 14:31:17 +0100327 OverrideDataType(output, dataType),
328 cLayer->GetParameters(),
329 reason);
330 break;
331 }
telsoa014fcda012018-03-09 14:13:49 +0000332 case LayerType::DepthwiseConvolution2d:
333 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100334 auto cLayer = PolymorphicDowncast<const DepthwiseConvolution2dLayer*>(&layer);
Cathal Corbett06902652022-04-14 17:55:11 +0100335 const TensorInfo& input = OverrideDataType(layer.GetInputSlot(0).GetConnection()->GetTensorInfo(),
336 dataType);
337 const TensorInfo& output = OverrideDataType(layer.GetOutputSlot(0).GetTensorInfo(), dataType);
338 const TensorInfo& weights = OverrideDataType(layer.GetInputSlot(1).GetConnection()->GetTensorInfo(),
339 dataType);
340
341 ARMNN_ASSERT(cLayer->GetInputSlot(1).GetConnection() != nullptr);
telsoa01c577f2c2018-08-31 09:22:23 +0100342
telsoa01c577f2c2018-08-31 09:22:23 +0100343 const DepthwiseConvolution2dDescriptor& descriptor = cLayer->GetParameters();
arovir01a6824102018-08-28 17:40:45 +0100344
345 // Construct optional biases object based on the value of m_BiasEnabled
David Beck5eec11d2018-10-04 15:43:17 +0100346 Optional<TensorInfo> biases;
telsoa01c577f2c2018-08-31 09:22:23 +0100347 if (descriptor.m_BiasEnabled)
348 {
Cathal Corbett06902652022-04-14 17:55:11 +0100349 biases = OverrideDataType(cLayer->GetInputSlot(2).GetConnection()->GetTensorInfo(),
350 GetBiasTypeFromWeightsType(dataType));
telsoa01c577f2c2018-08-31 09:22:23 +0100351 }
telsoa01c577f2c2018-08-31 09:22:23 +0100352
Cathal Corbett06902652022-04-14 17:55:11 +0100353 result = layerSupportObject.IsDepthwiseConvolutionSupported(input,
354 output,
355 descriptor,
356 weights,
357 biases,
358 reason);
telsoa014fcda012018-03-09 14:13:49 +0000359 break;
360 }
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +0000361 case LayerType::Dequantize:
362 {
363 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
364 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
365
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000366 result = layerSupportObject.IsDequantizeSupported(input,
367 OverrideDataType(output, dataType),
368 reason);
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +0000369 break;
370 }
Narumol Prangnawarat94dd5d82019-01-23 18:06:26 +0000371 case LayerType::DetectionPostProcess:
372 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100373 auto cLayer = PolymorphicDowncast<const DetectionPostProcessLayer*>(&layer);
Derek Lamberti6a5e5e82019-12-05 14:41:20 +0000374 const TensorInfo& boxEncodings = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
375 const TensorInfo& scores = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
376 const TensorInfo& anchors = cLayer->m_Anchors->GetTensorInfo();
377
378 const TensorInfo& detectionBoxes = layer.GetOutputSlot(0).GetTensorInfo();
379 const TensorInfo& detectionClasses = layer.GetOutputSlot(1).GetTensorInfo();
380 const TensorInfo& detectionScores = layer.GetOutputSlot(2).GetTensorInfo();
381 const TensorInfo& numDetections = layer.GetOutputSlot(3).GetTensorInfo();
382
Narumol Prangnawarat94dd5d82019-01-23 18:06:26 +0000383 const DetectionPostProcessDescriptor& descriptor = cLayer->GetParameters();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000384 result = layerSupportObject.IsDetectionPostProcessSupported(boxEncodings,
385 scores,
386 anchors,
387 detectionBoxes,
388 detectionClasses,
389 detectionScores,
390 numDetections,
391 descriptor,
392 reason);
Narumol Prangnawarat94dd5d82019-01-23 18:06:26 +0000393 break;
394 }
josh minor4a3c6102020-01-06 16:40:46 -0600395 case LayerType::ElementwiseUnary:
396 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100397 auto cLayer = PolymorphicDowncast<const ElementwiseUnaryLayer*>(&layer);
josh minor4a3c6102020-01-06 16:40:46 -0600398
399 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
400 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
401
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000402 result = layerSupportObject.IsElementwiseUnarySupported(OverrideDataType(input, dataType),
403 OverrideDataType(output, dataType),
404 cLayer->GetParameters(),
405 reason);
josh minor4a3c6102020-01-06 16:40:46 -0600406 break;
407 }
Ryan OSheaec6c6802020-06-05 17:17:06 +0100408 case LayerType::Fill:
409 {
410 auto cLayer = PolymorphicDowncast<const FillLayer*>(&layer);
411 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
412 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
413 const FillDescriptor& descriptor = cLayer->GetParameters();
414
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000415 result = layerSupportObject.IsFillSupported(
Ryan OSheaec6c6802020-06-05 17:17:06 +0100416 OverrideDataType(input, dataType),
417 OverrideDataType(output, dataType),
418 descriptor,
419 reason);
420 break;
421 }
telsoa014fcda012018-03-09 14:13:49 +0000422 case LayerType::FakeQuantization:
423 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100424 auto cLayer = PolymorphicDowncast<const FakeQuantizationLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000425 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000426 result = layerSupportObject.IsFakeQuantizationSupported(OverrideDataType(input, dataType),
427 cLayer->GetParameters(),
428 reason);
telsoa014fcda012018-03-09 14:13:49 +0000429 break;
430 }
431 case LayerType::Floor:
432 {
433 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
434 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000435 result = layerSupportObject.IsFloorSupported(OverrideDataType(input, dataType),
436 OverrideDataType(output, dataType),
437 reason);
telsoa014fcda012018-03-09 14:13:49 +0000438 break;
439 }
440 case LayerType::FullyConnected:
441 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100442 auto cLayer = PolymorphicDowncast<const FullyConnectedLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000443 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100444 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000445
446 const FullyConnectedDescriptor& descriptor = cLayer->GetParameters();
447 TensorInfo weightsInfo;
448 const TensorInfo* weightsInfoPtr = nullptr;
449
Matthew Sloyan81beae32021-07-13 19:46:11 +0100450 weightsInfo = OverrideDataType(layer.GetInputSlot(1).GetConnection()->GetTensorInfo(), dataType);
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000451 weightsInfoPtr = &weightsInfo;
telsoa01c577f2c2018-08-31 09:22:23 +0100452
453 TensorInfo biasInfo;
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000454 const TensorInfo* biasInfoPtr = nullptr;
Narumol Prangnawarat44179c32020-03-11 14:51:27 +0000455 static const TensorInfo dummyBFloat16Bias(TensorShape({1,1,1,1}), DataType::BFloat16);
telsoa01c577f2c2018-08-31 09:22:23 +0100456 static const TensorInfo dummyFloat16Bias(TensorShape({1,1,1,1}), DataType::Float16);
457 static const TensorInfo dummyFloat32Bias(TensorShape({1,1,1,1}), DataType::Float32);
458 static const TensorInfo dummyQA8Bias(TensorShape({1,1,1,1}), DataType::Signed32);
459
telsoa01c577f2c2018-08-31 09:22:23 +0100460 if (descriptor.m_BiasEnabled)
461 {
Matthew Sloyan81beae32021-07-13 19:46:11 +0100462 biasInfo = OverrideDataType(layer.GetInputSlot(2).GetConnection()->GetTensorInfo(), dataType);
463 biasInfoPtr = &biasInfo;
telsoa01c577f2c2018-08-31 09:22:23 +0100464 }
465 else
466 {
467 // If biases are not enabled pass a dummy tensorinfo for the validation
468 switch(input.GetDataType())
469 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +0000470 case DataType::BFloat16:
471 {
472 biasInfoPtr = &dummyBFloat16Bias;
473 break;
474 }
telsoa01c577f2c2018-08-31 09:22:23 +0100475 case DataType::Float16:
476 {
477 biasInfoPtr = &dummyFloat16Bias;
478 break;
479 }
480 case DataType::Float32:
481 {
482 biasInfoPtr = &dummyFloat32Bias;
483 break;
484 }
Derek Lambertif90c56d2020-01-10 17:14:08 +0000485 case DataType::QAsymmU8:
Keith Davisa8565012020-02-14 12:22:40 +0000486 case DataType::QAsymmS8:
Keith Davis9d0ff742020-02-03 14:47:54 +0000487 case DataType::QSymmS8:
Derek Lambertif90c56d2020-01-10 17:14:08 +0000488 case DataType::QSymmS16:
telsoa01c577f2c2018-08-31 09:22:23 +0100489 {
490 biasInfoPtr = &dummyQA8Bias;
491 break;
492 }
493 default:
494 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +0100495 ARMNN_ASSERT_MSG(false, "Unexpected bias type");
telsoa01c577f2c2018-08-31 09:22:23 +0100496 }
497 }
498 }
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000499 result = layerSupportObject.IsFullyConnectedSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100500 OverrideDataType(input, dataType),
501 OverrideDataType(output, dataType),
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000502 *weightsInfoPtr,
telsoa01c577f2c2018-08-31 09:22:23 +0100503 *biasInfoPtr,
504 descriptor,
David Beck33f0ae02018-10-18 15:13:56 +0100505 reason);
telsoa014fcda012018-03-09 14:13:49 +0000506 break;
507 }
narpra01b89b05f2019-01-16 09:53:09 +0000508 case LayerType::Gather:
509 {
510 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
511 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
512 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Teresa Charlin52664732020-06-29 16:27:03 +0100513 auto cLayer = PolymorphicDowncast<const GatherLayer*>(&layer);
514 const GatherDescriptor& descriptor = cLayer->GetParameters();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000515 result = layerSupportObject.IsGatherSupported(OverrideDataType(input0, dataType),
516 input1,
517 OverrideDataType(output, dataType),
518 descriptor,
519 reason);
narpra01b89b05f2019-01-16 09:53:09 +0000520 break;
521 }
Teresa Charlinb2d3ec52022-04-12 22:07:09 +0100522 case LayerType::GatherNd:
523 {
524 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
525 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
526 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
527 result = layerSupportObject.IsGatherNdSupported(OverrideDataType(input0, dataType),
528 input1,
529 OverrideDataType(output, dataType),
530 reason);
531 break;
532 }
telsoa014fcda012018-03-09 14:13:49 +0000533 case LayerType::Input:
534 {
535 const TensorInfo& input = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000536 result = layerSupportObject.IsInputSupported(OverrideDataType(input, dataType), reason);
telsoa014fcda012018-03-09 14:13:49 +0000537 break;
538 }
Kevin Mayce5045a2019-10-02 14:07:47 +0100539 case LayerType::InstanceNormalization:
540 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100541 auto cLayer = PolymorphicDowncast<const InstanceNormalizationLayer*>(&layer);
Kevin Mayce5045a2019-10-02 14:07:47 +0100542 const InstanceNormalizationDescriptor& descriptor = cLayer->GetParameters();
543
544 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
545 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
546
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000547 result = layerSupportObject.IsInstanceNormalizationSupported(
Kevin Mayce5045a2019-10-02 14:07:47 +0100548 OverrideDataType(input, dataType),
549 OverrideDataType(output, dataType),
550 descriptor,
551 reason);
552 break;
553 }
telsoa014fcda012018-03-09 14:13:49 +0000554 case LayerType::L2Normalization:
555 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100556 auto cLayer = PolymorphicDowncast<const L2NormalizationLayer*>(&layer);
Matteo Martincighbcd3c852018-09-28 14:14:12 +0100557 const L2NormalizationDescriptor& descriptor = cLayer->GetParameters();
558
telsoa014fcda012018-03-09 14:13:49 +0000559 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100560 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Matteo Martincighbcd3c852018-09-28 14:14:12 +0100561
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000562 result = layerSupportObject.IsL2NormalizationSupported(
Matteo Martincighbcd3c852018-09-28 14:14:12 +0100563 OverrideDataType(input, dataType),
564 OverrideDataType(output, dataType),
565 descriptor,
David Beck33f0ae02018-10-18 15:13:56 +0100566 reason);
telsoa01c577f2c2018-08-31 09:22:23 +0100567 break;
568 }
James Conroyaba90cd2020-11-06 16:28:18 +0000569 case LayerType::LogicalBinary:
570 {
571 auto cLayer = PolymorphicDowncast<const LogicalBinaryLayer*>(&layer);
572
573 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
574 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
575 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
576
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000577 result = layerSupportObject.IsLogicalBinarySupported(input0,
578 input1,
579 output,
580 cLayer->GetParameters(),
581 reason);
James Conroyaba90cd2020-11-06 16:28:18 +0000582 break;
583 }
Aron Virginas-Tarf982dea2019-10-11 14:07:53 +0100584 case LayerType::LogSoftmax:
585 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100586 auto cLayer = PolymorphicDowncast<const LogSoftmaxLayer*>(&layer);
Aron Virginas-Tarf982dea2019-10-11 14:07:53 +0100587
588 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
589 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
590
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000591 result = layerSupportObject.IsLogSoftmaxSupported(OverrideDataType(input, dataType),
592 OverrideDataType(output, dataType),
593 cLayer->GetParameters(),
594 reason);
Aron Virginas-Tarf982dea2019-10-11 14:07:53 +0100595 break;
596 }
telsoa01c577f2c2018-08-31 09:22:23 +0100597 case LayerType::Lstm:
598 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100599 auto cLayer = PolymorphicDowncast<const LstmLayer*>(&layer);
telsoa01c577f2c2018-08-31 09:22:23 +0100600 const LstmDescriptor& descriptor = cLayer->GetParameters();
601
602 // All inputs.
603 const TensorInfo& input = OverrideDataType(layer.GetInputSlot(0).GetConnection()->GetTensorInfo(),
604 dataType);
605 const TensorInfo& outputStateIn = OverrideDataType(layer.GetInputSlot(1).GetConnection()->GetTensorInfo(),
606 dataType);
607 const TensorInfo& cellStateIn = OverrideDataType(layer.GetInputSlot(2).GetConnection()->GetTensorInfo(),
608 dataType);
609 // All outputs
610 const TensorInfo& scratchBuffer = OverrideDataType(layer.GetOutputSlot(0).GetTensorInfo(), dataType);
611 const TensorInfo& outputStateOut = OverrideDataType(layer.GetOutputSlot(1).GetTensorInfo(), dataType);
612 const TensorInfo& cellStateOut = OverrideDataType(layer.GetOutputSlot(2).GetTensorInfo(), dataType);
613 const TensorInfo& output = OverrideDataType(layer.GetOutputSlot(3).GetTensorInfo(), dataType);
614
615 // Basic parameters
616 const TensorInfo& inputToForgetWeights
617 = OverrideDataType(cLayer->m_BasicParameters.m_InputToForgetWeights->GetTensorInfo(), dataType);
618 const TensorInfo& inputToCellWeights
619 = OverrideDataType(cLayer->m_BasicParameters.m_InputToCellWeights->GetTensorInfo(), dataType);
620 const TensorInfo& inputToOutputWeights
621 = OverrideDataType(cLayer->m_BasicParameters.m_InputToOutputWeights->GetTensorInfo(), dataType);
622 const TensorInfo& recurrentToForgetWeights
623 = OverrideDataType(cLayer->m_BasicParameters.m_RecurrentToForgetWeights->GetTensorInfo(), dataType);
624 const TensorInfo& recurrentToCellWeights
625 = OverrideDataType(cLayer->m_BasicParameters.m_RecurrentToCellWeights->GetTensorInfo(), dataType);
626 const TensorInfo& recurrentToOutputWeights
627 = OverrideDataType(cLayer->m_BasicParameters.m_RecurrentToOutputWeights->GetTensorInfo(), dataType);
628 const TensorInfo& forgetGateBias
629 = OverrideDataType(cLayer->m_BasicParameters.m_ForgetGateBias->GetTensorInfo(), dataType);
630 const TensorInfo& cellBias
631 = OverrideDataType(cLayer->m_BasicParameters.m_CellBias->GetTensorInfo(), dataType);
632 const TensorInfo& outputGateBias
633 = OverrideDataType(cLayer->m_BasicParameters.m_OutputGateBias->GetTensorInfo(), dataType);
634
Jan Eilersd01a83c2019-07-03 18:20:40 +0100635 LstmInputParamsInfo paramsInfo;
telsoa01c577f2c2018-08-31 09:22:23 +0100636
Jan Eilersd01a83c2019-07-03 18:20:40 +0100637 paramsInfo.m_InputToForgetWeights = &inputToForgetWeights;
638 paramsInfo.m_InputToCellWeights = &inputToCellWeights;
639 paramsInfo.m_InputToOutputWeights = &inputToOutputWeights;
640 paramsInfo.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
641 paramsInfo.m_RecurrentToCellWeights = &recurrentToCellWeights;
642 paramsInfo.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
643 paramsInfo.m_ForgetGateBias = &forgetGateBias;
644 paramsInfo.m_CellBias = &cellBias;
645 paramsInfo.m_OutputGateBias = &outputGateBias;
646
647
648 // Optional parameters
telsoa01c577f2c2018-08-31 09:22:23 +0100649 TensorInfo optInputToInputWeights;
650 TensorInfo optRecurrentToInputWeights;
651 TensorInfo optCellToInputWeights;
652 TensorInfo optInputGateBias;
653 TensorInfo optProjectionWeights;
654 TensorInfo optProjectionBias;
655 TensorInfo optCellToForgetWeights;
656 TensorInfo optCellToOutputWeights;
Jan Eilers38e05bd2019-06-26 13:10:09 +0100657 TensorInfo optInputLayerNormWeights;
658 TensorInfo optForgetLayerNormWeights;
659 TensorInfo optCellLayerNormWeights;
660 TensorInfo optOutputLayerNormWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100661
662 if(!descriptor.m_CifgEnabled)
663 {
664 optInputToInputWeights =
665 OverrideDataType(cLayer->m_CifgParameters.m_InputToInputWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100666 paramsInfo.m_InputToInputWeights = &optInputToInputWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100667
668 optRecurrentToInputWeights =
669 OverrideDataType(cLayer->m_CifgParameters.m_RecurrentToInputWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100670 paramsInfo.m_RecurrentToInputWeights = &optRecurrentToInputWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100671 optInputGateBias =
672 OverrideDataType(cLayer->m_CifgParameters.m_InputGateBias->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100673 paramsInfo.m_InputGateBias = &optInputGateBias;
telsoa01c577f2c2018-08-31 09:22:23 +0100674 }
675
676 if(descriptor.m_ProjectionEnabled)
677 {
678 optProjectionWeights =
679 OverrideDataType(cLayer->m_ProjectionParameters.m_ProjectionWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100680 paramsInfo.m_ProjectionWeights = &optProjectionWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100681 if (cLayer->m_ProjectionParameters.m_ProjectionBias != nullptr)
682 {
683 optProjectionBias =
684 OverrideDataType(cLayer->m_ProjectionParameters.m_ProjectionBias->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100685 paramsInfo.m_ProjectionBias = &optProjectionBias;
telsoa01c577f2c2018-08-31 09:22:23 +0100686 }
687 }
688
689 if(descriptor.m_PeepholeEnabled)
690 {
Jan Eilerse2062cd2020-03-30 15:07:45 +0100691 if(!descriptor.m_CifgEnabled)
692 {
693 optCellToInputWeights =
694 OverrideDataType(cLayer->m_PeepholeParameters.m_CellToInputWeights->GetTensorInfo(),
695 dataType);
696 paramsInfo.m_CellToInputWeights = &optCellToInputWeights;
697 }
telsoa01c577f2c2018-08-31 09:22:23 +0100698 optCellToForgetWeights =
699 OverrideDataType(cLayer->m_PeepholeParameters.m_CellToForgetWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100700 paramsInfo.m_CellToForgetWeights = &optCellToForgetWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100701 optCellToOutputWeights =
702 OverrideDataType(cLayer->m_PeepholeParameters.m_CellToOutputWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100703 paramsInfo.m_CellToOutputWeights = &optCellToOutputWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100704 }
705
Jan Eilers38e05bd2019-06-26 13:10:09 +0100706 if(descriptor.m_LayerNormEnabled)
707 {
Ferran Balaguere30c16e2019-07-24 17:03:45 +0100708 if (!descriptor.m_CifgEnabled)
709 {
710 optInputLayerNormWeights = OverrideDataType(
711 cLayer->m_LayerNormParameters.m_InputLayerNormWeights->GetTensorInfo(), dataType);
712 paramsInfo.m_InputLayerNormWeights = &optInputLayerNormWeights;
713 }
Jan Eilers38e05bd2019-06-26 13:10:09 +0100714
715 optForgetLayerNormWeights = OverrideDataType(
716 cLayer->m_LayerNormParameters.m_ForgetLayerNormWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100717 paramsInfo.m_ForgetLayerNormWeights = &optForgetLayerNormWeights;
Jan Eilers38e05bd2019-06-26 13:10:09 +0100718
719 optCellLayerNormWeights = OverrideDataType(
720 cLayer->m_LayerNormParameters.m_CellLayerNormWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100721 paramsInfo.m_CellLayerNormWeights = &optCellLayerNormWeights;
Jan Eilers38e05bd2019-06-26 13:10:09 +0100722
723 optOutputLayerNormWeights = OverrideDataType(
724 cLayer->m_LayerNormParameters.m_OutputLayerNormWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100725 paramsInfo.m_OutputLayerNormWeights = &optOutputLayerNormWeights;
Jan Eilers38e05bd2019-06-26 13:10:09 +0100726 }
727
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000728 result = layerSupportObject.IsLstmSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100729 input,
730 outputStateIn,
731 cellStateIn,
732 scratchBuffer,
733 outputStateOut,
734 cellStateOut,
735 output,
736 descriptor,
Jan Eilersd01a83c2019-07-03 18:20:40 +0100737 paramsInfo,
738 reason);
telsoa014fcda012018-03-09 14:13:49 +0000739 break;
740 }
Nattapat Chaimanowong5a4304a2018-11-28 10:44:37 +0000741 case LayerType::Maximum:
742 {
743 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
744 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
745 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
746
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000747 result = layerSupportObject.IsMaximumSupported(OverrideDataType(input0, dataType),
748 OverrideDataType(input1, dataType),
749 OverrideDataType(output, dataType),
750 reason);
Nattapat Chaimanowong5a4304a2018-11-28 10:44:37 +0000751 break;
752 }
narpra01b89b05f2019-01-16 09:53:09 +0000753 case LayerType::MemCopy:
754 {
755 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
756 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Matteo Martincigh992d6dc2019-01-10 17:34:20 +0000757
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000758 result = layerSupportObject.IsMemCopySupported(OverrideDataType(input, dataType),
759 OverrideDataType(output, dataType),
760 reason);
narpra01b89b05f2019-01-16 09:53:09 +0000761 break;
762 }
Derek Lambertif674aa02019-08-01 15:56:25 +0100763 case LayerType::MemImport:
764 {
765 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
766 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
767
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000768 result = layerSupportObject.IsMemImportSupported(OverrideDataType(input, dataType),
769 OverrideDataType(output, dataType),
770 reason);
Derek Lambertif674aa02019-08-01 15:56:25 +0100771 break;
772 }
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +0100773 case LayerType::Merge:
774 {
775 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
776 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
777 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
778
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000779 result = layerSupportObject.IsMergeSupported(OverrideDataType(input0, dataType),
780 OverrideDataType(input1, dataType),
781 OverrideDataType(output, dataType),
782 reason);
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +0100783 break;
784 }
Jim Flynne242f2d2019-05-22 14:24:13 +0100785 case LayerType::Concat:
telsoa014fcda012018-03-09 14:13:49 +0000786 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100787 auto cLayer = PolymorphicDowncast<const ConcatLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000788
telsoa01c577f2c2018-08-31 09:22:23 +0100789 // Get vector of all inputs.
790 auto getTensorInfo = [&dataType](const InputSlot& slot)
telsoa014fcda012018-03-09 14:13:49 +0000791 {
telsoa01c577f2c2018-08-31 09:22:23 +0100792 return OverrideDataType(slot.GetConnectedOutputSlot()->GetTensorInfo(), dataType);
telsoa014fcda012018-03-09 14:13:49 +0000793 };
Finn Williams3e54d032020-10-22 16:53:35 +0100794
795 auto beginI = MakeTransformIterator(layer.GetInputSlots().begin(), getTensorInfo);
796 auto endI = MakeTransformIterator(layer.GetInputSlots().end(), getTensorInfo);
telsoa01c577f2c2018-08-31 09:22:23 +0100797 std::vector<TensorInfo> inputs(beginI, endI);
telsoa014fcda012018-03-09 14:13:49 +0000798
telsoa01c577f2c2018-08-31 09:22:23 +0100799 auto getTensorInfoPtr = [](const TensorInfo& info)
800 {
801 return &info;
802 };
Finn Williams3e54d032020-10-22 16:53:35 +0100803
804 auto beginPtr = MakeTransformIterator(inputs.begin(), getTensorInfoPtr);
805 auto endPtr = MakeTransformIterator(inputs.end(), getTensorInfoPtr);
telsoa01c577f2c2018-08-31 09:22:23 +0100806 std::vector<const TensorInfo*> inputPtrs(beginPtr, endPtr);
telsoa014fcda012018-03-09 14:13:49 +0000807
Nikhil Raj8599a412018-11-19 14:51:07 +0000808 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
809
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000810 result = layerSupportObject.IsConcatSupported(inputPtrs, output, cLayer->GetParameters(), reason);
Jim Flynne242f2d2019-05-22 14:24:13 +0100811
812
telsoa014fcda012018-03-09 14:13:49 +0000813 break;
814 }
815 case LayerType::Multiplication:
816 {
817 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
818 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100819 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000820 result = layerSupportObject.IsMultiplicationSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100821 OverrideDataType(input0, dataType),
822 OverrideDataType(input1, dataType),
823 OverrideDataType(output, dataType),
David Beck33f0ae02018-10-18 15:13:56 +0100824 reason);
telsoa014fcda012018-03-09 14:13:49 +0000825 break;
826 }
827 case LayerType::Normalization:
828 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100829 auto cLayer = PolymorphicDowncast<const NormalizationLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000830 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
831 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000832 result = layerSupportObject.IsNormalizationSupported(OverrideDataType(input, dataType),
833 OverrideDataType(output, dataType),
834 cLayer->GetParameters(),
835 reason);
telsoa014fcda012018-03-09 14:13:49 +0000836 break;
837 }
838 case LayerType::Output:
839 {
840 const TensorInfo& output = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000841 result = layerSupportObject.IsOutputSupported(OverrideDataType(output, dataType), reason);
telsoa014fcda012018-03-09 14:13:49 +0000842 break;
843 }
844 case LayerType::Permute:
845 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100846 auto cLayer = PolymorphicDowncast<const PermuteLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000847 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
848 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000849 result = layerSupportObject.IsPermuteSupported(OverrideDataType(input, dataType),
850 OverrideDataType(output, dataType),
851 cLayer->GetParameters(),
852 reason);
telsoa014fcda012018-03-09 14:13:49 +0000853 break;
854 }
Mohamed Nour Abouelseoud5662c202018-09-24 13:30:09 +0100855 case LayerType::Pad:
856 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100857 auto cLayer = PolymorphicDowncast<const PadLayer*>(&layer);
Mohamed Nour Abouelseoud5662c202018-09-24 13:30:09 +0100858 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
859 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000860 result = layerSupportObject.IsPadSupported(
Mohamed Nour Abouelseoud5662c202018-09-24 13:30:09 +0100861 OverrideDataType(input, dataType),
862 OverrideDataType(output, dataType),
863 cLayer->GetParameters(),
David Beck33f0ae02018-10-18 15:13:56 +0100864 reason);
Mohamed Nour Abouelseoud5662c202018-09-24 13:30:09 +0100865 break;
866 }
telsoa014fcda012018-03-09 14:13:49 +0000867 case LayerType::Pooling2d:
868 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100869 auto cLayer = PolymorphicDowncast<const Pooling2dLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000870 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
871 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000872 result = layerSupportObject.IsPooling2dSupported(OverrideDataType(input, dataType),
873 OverrideDataType(output, dataType),
874 cLayer->GetParameters(),
875 reason);
telsoa014fcda012018-03-09 14:13:49 +0000876 break;
877 }
Tamás Nyíri7b885b32021-10-26 14:47:57 +0100878 case LayerType::Pooling3d:
879 {
880 auto cLayer = PolymorphicDowncast<const Pooling3dLayer*>(&layer);
881 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
882 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
883 result = layerSupportObject.IsPooling3dSupported(OverrideDataType(input, dataType),
884 OverrideDataType(output, dataType),
885 cLayer->GetParameters(),
886 reason);
887 break;
888 }
Matteo Martincigh49124022019-01-11 13:25:59 +0000889 case LayerType::PreCompiled:
890 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100891 auto cLayer = PolymorphicDowncast<const PreCompiledLayer*>(&layer);
Matteo Martincigh49124022019-01-11 13:25:59 +0000892 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000893 result = layerSupportObject.IsPreCompiledSupported(OverrideDataType(input, dataType),
894 cLayer->GetParameters(),
895 reason);
Matteo Martincigh49124022019-01-11 13:25:59 +0000896 break;
897 }
Derek Lambertia9cca6a2019-03-25 15:41:58 +0000898 case LayerType::Quantize:
899 {
900 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
901 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000902 result = layerSupportObject.IsQuantizeSupported(input, output, reason);
Derek Lambertia9cca6a2019-03-25 15:41:58 +0000903 break;
904 }
James Conroy586a9aa2020-03-20 08:49:33 +0000905 case LayerType::QLstm:
906 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100907 auto cLayer = PolymorphicDowncast<const QLstmLayer*>(&layer);
James Conroy586a9aa2020-03-20 08:49:33 +0000908 const QLstmDescriptor& descriptor = cLayer->GetParameters();
909
910 // Inputs
911 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
912 const TensorInfo& previousOutputIn = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
913 const TensorInfo& previousCellStateIn = layer.GetInputSlot(2).GetConnection()->GetTensorInfo();
914
915 // Outputs
916 const TensorInfo& outputStateOut = layer.GetOutputSlot(0).GetTensorInfo();
917 const TensorInfo& cellStateOut = layer.GetOutputSlot(1).GetTensorInfo();
918 const TensorInfo& output = layer.GetOutputSlot(2).GetTensorInfo();
919
920 // Lstm parameters
921 LstmInputParamsInfo paramsInfo;
922
923 // Basic parameters
Matthew Bentham6f24b1a2021-06-29 15:18:32 +0100924 ARMNN_ASSERT(cLayer->m_BasicParameters.m_InputToForgetWeights.get() != nullptr);
925 ARMNN_ASSERT(cLayer->m_BasicParameters.m_InputToCellWeights.get() != nullptr);
926 ARMNN_ASSERT(cLayer->m_BasicParameters.m_InputToOutputWeights.get() != nullptr);
James Conroy586a9aa2020-03-20 08:49:33 +0000927 paramsInfo.m_InputToForgetWeights = &cLayer->m_BasicParameters.m_InputToForgetWeights->GetTensorInfo();
928 paramsInfo.m_InputToCellWeights = &cLayer->m_BasicParameters.m_InputToCellWeights->GetTensorInfo();
929 paramsInfo.m_InputToOutputWeights = &cLayer->m_BasicParameters.m_InputToOutputWeights->GetTensorInfo();
930
931 paramsInfo.m_RecurrentToForgetWeights =
932 &cLayer->m_BasicParameters.m_RecurrentToForgetWeights->GetTensorInfo();
933 paramsInfo.m_RecurrentToCellWeights =
934 &cLayer->m_BasicParameters.m_RecurrentToCellWeights->GetTensorInfo();
935 paramsInfo.m_RecurrentToOutputWeights =
936 &cLayer->m_BasicParameters.m_RecurrentToOutputWeights->GetTensorInfo();
937
938 paramsInfo.m_ForgetGateBias = &cLayer->m_BasicParameters.m_ForgetGateBias->GetTensorInfo();
939 paramsInfo.m_CellBias = &cLayer->m_BasicParameters.m_CellBias->GetTensorInfo();
940 paramsInfo.m_OutputGateBias = &cLayer->m_BasicParameters.m_OutputGateBias->GetTensorInfo();
941
942 if(!descriptor.m_CifgEnabled)
943 {
944 paramsInfo.m_InputToInputWeights = &cLayer->m_CifgParameters.m_InputToInputWeights->GetTensorInfo();
945 paramsInfo.m_RecurrentToInputWeights =
946 &cLayer->m_CifgParameters.m_RecurrentToInputWeights->GetTensorInfo();
947 paramsInfo.m_InputGateBias = &cLayer->m_CifgParameters.m_InputGateBias->GetTensorInfo();
948 }
949
950 if(descriptor.m_ProjectionEnabled)
951 {
952 paramsInfo.m_ProjectionWeights = &cLayer->m_ProjectionParameters.m_ProjectionWeights->GetTensorInfo();
James Conroyed324052020-05-18 15:16:42 +0100953
954 // Projection bias is optional even if projection is enabled
955 if (cLayer->m_ProjectionParameters.m_ProjectionBias != nullptr)
956 {
957 paramsInfo.m_ProjectionBias = &cLayer->m_ProjectionParameters.m_ProjectionBias->GetTensorInfo();
958 }
James Conroy586a9aa2020-03-20 08:49:33 +0000959 }
960
961 if(descriptor.m_PeepholeEnabled)
962 {
963 if (!descriptor.m_CifgEnabled)
964 {
965 paramsInfo.m_CellToInputWeights =
966 &cLayer->m_PeepholeParameters.m_CellToInputWeights->GetTensorInfo();
967 }
968
969 paramsInfo.m_CellToForgetWeights =
970 &cLayer->m_PeepholeParameters.m_CellToForgetWeights->GetTensorInfo();
971 paramsInfo.m_CellToOutputWeights = &cLayer->m_PeepholeParameters.m_CellToOutputWeights->GetTensorInfo();
972 }
973
974 if(descriptor.m_LayerNormEnabled)
975 {
976 if (!descriptor.m_CifgEnabled)
977 {
978 paramsInfo.m_InputLayerNormWeights =
979 &cLayer->m_LayerNormParameters.m_InputLayerNormWeights->GetTensorInfo();
980 }
981
982 paramsInfo.m_ForgetLayerNormWeights =
983 &cLayer->m_LayerNormParameters.m_ForgetLayerNormWeights->GetTensorInfo();
984 paramsInfo.m_CellLayerNormWeights =
985 &cLayer->m_LayerNormParameters.m_CellLayerNormWeights->GetTensorInfo();
986 paramsInfo.m_OutputLayerNormWeights =
987 &cLayer->m_LayerNormParameters.m_OutputLayerNormWeights->GetTensorInfo();
988 }
989
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000990 result = layerSupportObject.IsQLstmSupported(input,
991 previousOutputIn,
992 previousCellStateIn,
993 outputStateOut,
994 cellStateOut,
995 output,
996 descriptor,
997 paramsInfo,
998 reason);
James Conroy586a9aa2020-03-20 08:49:33 +0000999 break;
1000 }
James Conroyee18dc82019-07-17 11:27:46 +01001001 case LayerType::QuantizedLstm:
1002 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001003 auto cLayer = PolymorphicDowncast<const QuantizedLstmLayer*>(&layer);
James Conroyee18dc82019-07-17 11:27:46 +01001004
1005 // Inputs
Ferran Balaguer737d9ff2019-08-01 09:58:08 +01001006 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1007 const TensorInfo& previousCellStateIn = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
1008 const TensorInfo& previousOutputIn = layer.GetInputSlot(2).GetConnection()->GetTensorInfo();
James Conroyee18dc82019-07-17 11:27:46 +01001009
1010 // Outputs
Ferran Balaguer737d9ff2019-08-01 09:58:08 +01001011 const TensorInfo& cellStateOut = layer.GetOutputSlot(0).GetTensorInfo();
1012 const TensorInfo& output = layer.GetOutputSlot(1).GetTensorInfo();
James Conroyee18dc82019-07-17 11:27:46 +01001013
1014 // QuantizedLstm parameters
James Conroyee18dc82019-07-17 11:27:46 +01001015 QuantizedLstmInputParamsInfo paramsInfo;
1016
Ferran Balaguer737d9ff2019-08-01 09:58:08 +01001017 paramsInfo.m_InputToInputWeights =
1018 &cLayer->m_QuantizedLstmParameters.m_InputToInputWeights->GetTensorInfo();
1019 paramsInfo.m_InputToForgetWeights =
1020 &cLayer->m_QuantizedLstmParameters.m_InputToForgetWeights->GetTensorInfo();
1021 paramsInfo.m_InputToCellWeights =
1022 &cLayer->m_QuantizedLstmParameters.m_InputToCellWeights->GetTensorInfo();
1023 paramsInfo.m_InputToOutputWeights =
1024 &cLayer->m_QuantizedLstmParameters.m_InputToOutputWeights->GetTensorInfo();
James Conroyee18dc82019-07-17 11:27:46 +01001025
Ferran Balaguer737d9ff2019-08-01 09:58:08 +01001026 paramsInfo.m_RecurrentToInputWeights =
1027 &cLayer->m_QuantizedLstmParameters.m_RecurrentToInputWeights->GetTensorInfo();
1028 paramsInfo.m_RecurrentToForgetWeights =
1029 &cLayer->m_QuantizedLstmParameters.m_RecurrentToForgetWeights->GetTensorInfo();
1030 paramsInfo.m_RecurrentToCellWeights =
1031 &cLayer->m_QuantizedLstmParameters.m_RecurrentToCellWeights->GetTensorInfo();
1032 paramsInfo.m_RecurrentToOutputWeights =
1033 &cLayer->m_QuantizedLstmParameters.m_RecurrentToOutputWeights->GetTensorInfo();
James Conroyee18dc82019-07-17 11:27:46 +01001034
Ferran Balaguer737d9ff2019-08-01 09:58:08 +01001035 paramsInfo.m_InputGateBias =
1036 &cLayer->m_QuantizedLstmParameters.m_InputGateBias->GetTensorInfo();
1037 paramsInfo.m_ForgetGateBias =
1038 &cLayer->m_QuantizedLstmParameters.m_ForgetGateBias->GetTensorInfo();
1039 paramsInfo.m_CellBias =
1040 &cLayer->m_QuantizedLstmParameters.m_CellBias->GetTensorInfo();
1041 paramsInfo.m_OutputGateBias =
1042 &cLayer->m_QuantizedLstmParameters.m_OutputGateBias->GetTensorInfo();;
James Conroyee18dc82019-07-17 11:27:46 +01001043
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001044 result = layerSupportObject.IsQuantizedLstmSupported(input,
1045 previousCellStateIn,
1046 previousOutputIn,
1047 cellStateOut,
1048 output,
1049 paramsInfo,
1050 reason);
James Conroyee18dc82019-07-17 11:27:46 +01001051 break;
1052 }
Francis Murtaghe7a86a42018-08-29 12:42:10 +01001053 case LayerType::Division:
1054 {
1055 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1056 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
1057 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001058 result = layerSupportObject.IsDivisionSupported(
Francis Murtaghe7a86a42018-08-29 12:42:10 +01001059 OverrideDataType(input0, dataType),
1060 OverrideDataType(input1, dataType),
1061 OverrideDataType(output, dataType),
David Beck33f0ae02018-10-18 15:13:56 +01001062 reason);
Francis Murtaghe7a86a42018-08-29 12:42:10 +01001063 break;
1064 }
Finn Williams2605b232020-06-10 15:53:46 +01001065 case LayerType::Rank:
1066 {
1067 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1068 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001069 result = layerSupportObject.IsRankSupported(OverrideDataType(input, dataType),
1070 OverrideDataType(output, dataType),
1071 reason);
Finn Williams2605b232020-06-10 15:53:46 +01001072 break;
1073 }
telsoa014fcda012018-03-09 14:13:49 +00001074 case LayerType::Reshape:
1075 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001076 auto cLayer = PolymorphicDowncast<const ReshapeLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +00001077 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
Kevin Maya023c402019-12-12 17:28:05 +00001078 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001079 result = layerSupportObject.IsReshapeSupported(OverrideDataType(input, dataType),
1080 OverrideDataType(output, dataType),
1081 cLayer->GetParameters(),
1082 reason);
telsoa014fcda012018-03-09 14:13:49 +00001083 break;
1084 }
Teresa Charlina9075df2019-06-27 15:41:57 +01001085 case LayerType::Resize:
1086 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001087 auto cLayer = PolymorphicDowncast<const ResizeLayer*>(&layer);
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01001088 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
Teresa Charlina9075df2019-06-27 15:41:57 +01001089 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001090 result = layerSupportObject.IsResizeSupported(OverrideDataType(input, dataType),
1091 OverrideDataType(output, dataType),
1092 cLayer->GetParameters(),
1093 reason);
Teresa Charlina9075df2019-06-27 15:41:57 +01001094 break;
1095 }
Keith Davis3ae3f972021-05-21 16:33:48 +01001096 case LayerType::Shape:
1097 {
1098 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1099 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
1100
1101 result = layerSupportObject.IsShapeSupported(OverrideDataType(input, dataType),
1102 OverrideDataType(output, dataType),
1103 reason);
1104 break;
1105 }
Aron Virginas-Tar636ab402019-09-16 14:27:45 +01001106 case LayerType::Slice:
1107 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001108 auto cLayer = PolymorphicDowncast<const SliceLayer*>(&layer);
Aron Virginas-Tar636ab402019-09-16 14:27:45 +01001109
1110 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1111 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
1112
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001113 result = layerSupportObject.IsSliceSupported(OverrideDataType(input, dataType),
1114 OverrideDataType(output, dataType),
1115 cLayer->GetParameters(),
1116 reason);
Aron Virginas-Tar636ab402019-09-16 14:27:45 +01001117 break;
1118 }
telsoa014fcda012018-03-09 14:13:49 +00001119 case LayerType::Softmax:
1120 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001121 auto cLayer = PolymorphicDowncast<const SoftmaxLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +00001122 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +01001123 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001124 result = layerSupportObject.IsSoftmaxSupported(OverrideDataType(input, dataType),
1125 OverrideDataType(output, dataType),
1126 cLayer->GetParameters(),
1127 reason);
telsoa014fcda012018-03-09 14:13:49 +00001128 break;
1129 }
Nattapat Chaimanowong207ef9a2018-11-02 10:57:25 +00001130 case LayerType::SpaceToBatchNd:
1131 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001132 auto cLayer = PolymorphicDowncast<const SpaceToBatchNdLayer*>(&layer);
Nattapat Chaimanowong207ef9a2018-11-02 10:57:25 +00001133 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1134 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001135 result = layerSupportObject.IsSpaceToBatchNdSupported(OverrideDataType(input, dataType),
1136 OverrideDataType(output, dataType),
1137 cLayer->GetParameters(),
1138 reason);
Nattapat Chaimanowong207ef9a2018-11-02 10:57:25 +00001139 break;
1140 }
Aron Virginas-Tar972af152019-06-11 14:14:03 +01001141 case LayerType::SpaceToDepth:
1142 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001143 auto cLayer = PolymorphicDowncast<const SpaceToDepthLayer*>(&layer);
Aron Virginas-Tar972af152019-06-11 14:14:03 +01001144
1145 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1146 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
1147
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001148 result = layerSupportObject.IsSpaceToDepthSupported(OverrideDataType(input, dataType),
1149 OverrideDataType(output, dataType),
1150 cLayer->GetParameters(),
1151 reason);
Aron Virginas-Tar972af152019-06-11 14:14:03 +01001152 break;
1153 }
telsoa014fcda012018-03-09 14:13:49 +00001154 case LayerType::Splitter:
1155 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001156 auto cLayer = PolymorphicDowncast<const SplitterLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +00001157 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
Narumol Prangnawarat15eb5832019-05-20 15:31:05 +01001158
1159 // Get vector of all outputs.
1160 auto getTensorInfo = [&dataType](const OutputSlot& slot)
1161 {
1162 return OverrideDataType(slot.GetTensorInfo(), dataType);
1163 };
Finn Williams3e54d032020-10-22 16:53:35 +01001164 auto beginI = MakeTransformIterator(layer.GetOutputSlots().begin(), getTensorInfo);
1165 auto endI = MakeTransformIterator(layer.GetOutputSlots().end(), getTensorInfo);
Narumol Prangnawarat15eb5832019-05-20 15:31:05 +01001166 std::vector<TensorInfo> outputs(beginI, endI);
1167
1168 const std::vector<std::reference_wrapper<TensorInfo>> outputPtrs(outputs.begin(), outputs.end());
1169
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001170 result = layerSupportObject.IsSplitterSupported(OverrideDataType(input, dataType),
1171 outputPtrs,
1172 cLayer->GetParameters(),
1173 reason);
telsoa014fcda012018-03-09 14:13:49 +00001174 break;
1175 }
Matthew Jackson2b8c1da2019-07-04 14:59:16 +01001176 case LayerType::Stack:
1177 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001178 auto cLayer = PolymorphicDowncast<const StackLayer*>(&layer);
Matthew Jackson2b8c1da2019-07-04 14:59:16 +01001179
1180 // Get vector of all inputs.
1181 auto getTensorInfo = [&dataType](const InputSlot& slot)
1182 {
1183 return OverrideDataType(slot.GetConnectedOutputSlot()->GetTensorInfo(), dataType);
1184 };
Finn Williams3e54d032020-10-22 16:53:35 +01001185 auto beginI = MakeTransformIterator(layer.GetInputSlots().begin(), getTensorInfo);
1186 auto endI = MakeTransformIterator(layer.GetInputSlots().end(), getTensorInfo);
Matthew Jackson2b8c1da2019-07-04 14:59:16 +01001187 std::vector<TensorInfo> inputs(beginI, endI);
1188
1189 auto getTensorInfoPtr = [](const TensorInfo& info)
1190 {
1191 return &info;
1192 };
Finn Williams3e54d032020-10-22 16:53:35 +01001193 auto beginPtr = MakeTransformIterator(inputs.begin(), getTensorInfoPtr);
1194 auto endPtr = MakeTransformIterator(inputs.end(), getTensorInfoPtr);
Matthew Jackson2b8c1da2019-07-04 14:59:16 +01001195 std::vector<const TensorInfo*> inputPtrs(beginPtr, endPtr);
1196
1197 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
1198
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001199 result = layerSupportObject.IsStackSupported(inputPtrs, output, cLayer->GetParameters(), reason);
Matthew Jackson2b8c1da2019-07-04 14:59:16 +01001200
1201 break;
1202 }
Derek Lamberti013c3902019-10-21 10:46:16 +01001203 case LayerType::StandIn:
1204 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001205 auto cLayer = PolymorphicDowncast<const StandInLayer*>(&layer);
Derek Lamberti013c3902019-10-21 10:46:16 +01001206
1207 // Get vector of all inputs.
1208 auto getTensorInfoIn = [&dataType](const InputSlot& slot)
1209 {
1210 return OverrideDataType(slot.GetConnectedOutputSlot()->GetTensorInfo(), dataType);
1211 };
1212 auto getTensorInfoOut = [&dataType](const OutputSlot& slot)
1213 {
1214 return OverrideDataType(slot.GetTensorInfo(), dataType);
1215 };
Finn Williams3e54d032020-10-22 16:53:35 +01001216 auto beginI = MakeTransformIterator(layer.GetInputSlots().begin(), getTensorInfoIn);
1217 auto endI = MakeTransformIterator(layer.GetInputSlots().end(), getTensorInfoIn);
Derek Lamberti013c3902019-10-21 10:46:16 +01001218 std::vector<TensorInfo> inputs(beginI, endI);
1219
Finn Williams3e54d032020-10-22 16:53:35 +01001220 auto beginO = MakeTransformIterator(layer.GetOutputSlots().begin(), getTensorInfoOut);
1221 auto endO = MakeTransformIterator(layer.GetOutputSlots().end(), getTensorInfoOut);
Derek Lamberti013c3902019-10-21 10:46:16 +01001222 std::vector<TensorInfo> outputs(beginO, endO);
1223
1224
1225 auto getTensorInfoPtr = [](const TensorInfo& info)
1226 {
1227 return &info;
1228 };
Finn Williams3e54d032020-10-22 16:53:35 +01001229 auto beginPtrI = MakeTransformIterator(inputs.begin(), getTensorInfoPtr);
1230 auto endPtrI = MakeTransformIterator(inputs.end(), getTensorInfoPtr);
Derek Lamberti013c3902019-10-21 10:46:16 +01001231 std::vector<const TensorInfo*> inputPtrs(beginPtrI, endPtrI);
1232
Finn Williams3e54d032020-10-22 16:53:35 +01001233 auto beginPtrO = MakeTransformIterator(outputs.begin(), getTensorInfoPtr);
1234 auto endPtrO = MakeTransformIterator(outputs.end(), getTensorInfoPtr);
Derek Lamberti013c3902019-10-21 10:46:16 +01001235 std::vector<const TensorInfo*> outputPtrs(beginPtrO, endPtrO);
1236
1237
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001238 result = layerSupportObject.IsStandInSupported(inputPtrs,
1239 outputPtrs,
1240 cLayer->GetParameters(),
1241 reason);
Derek Lamberti013c3902019-10-21 10:46:16 +01001242 break;
1243 }
Conor Kennedy430b5d82018-11-14 15:28:28 +00001244 case LayerType::StridedSlice:
1245 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001246 auto cLayer = PolymorphicDowncast<const StridedSliceLayer*>(&layer);
Conor Kennedy430b5d82018-11-14 15:28:28 +00001247 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1248 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001249 result = layerSupportObject.IsStridedSliceSupported(OverrideDataType(input, dataType),
1250 OverrideDataType(output, dataType),
1251 cLayer->GetParameters(),
1252 reason);
Conor Kennedy430b5d82018-11-14 15:28:28 +00001253 break;
1254 }
David Beckc2044fe2018-09-05 15:00:38 +01001255 case LayerType::Subtraction:
1256 {
1257 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1258 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
1259 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001260 result = layerSupportObject.IsSubtractionSupported(
David Beckc2044fe2018-09-05 15:00:38 +01001261 OverrideDataType(input0, dataType),
1262 OverrideDataType(input1, dataType),
1263 OverrideDataType(output, dataType),
David Beck33f0ae02018-10-18 15:13:56 +01001264 reason);
David Beckc2044fe2018-09-05 15:00:38 +01001265 break;
1266 }
Sadik Armaganeff363d2019-04-05 15:25:46 +01001267 case LayerType::Switch:
1268 {
1269 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1270 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
1271 const TensorInfo& output0 = layer.GetOutputSlot(0).GetTensorInfo();
1272 const TensorInfo& output1 = layer.GetOutputSlot(1).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001273 result = layerSupportObject.IsSwitchSupported(OverrideDataType(input0, dataType),
1274 OverrideDataType(input1, dataType),
1275 OverrideDataType(output0, dataType),
1276 OverrideDataType(output1, dataType),
1277 reason);
Sadik Armaganeff363d2019-04-05 15:25:46 +01001278 break;
1279 }
narpra0132b90462018-09-13 11:07:48 +01001280 case LayerType::Mean:
1281 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001282 auto cLayer = PolymorphicDowncast<const MeanLayer*>(&layer);
narpra0132b90462018-09-13 11:07:48 +01001283 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1284 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001285 result = layerSupportObject.IsMeanSupported(
narpra0132b90462018-09-13 11:07:48 +01001286 OverrideDataType(input, dataType),
1287 OverrideDataType(output, dataType),
1288 cLayer->GetParameters(),
David Beck33f0ae02018-10-18 15:13:56 +01001289 reason);
narpra0132b90462018-09-13 11:07:48 +01001290 break;
1291 }
kevmay0190539692018-11-29 08:40:19 +00001292 case LayerType::Minimum:
1293 {
1294 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1295 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
1296 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001297 result = layerSupportObject.IsMinimumSupported(OverrideDataType(input0, dataType),
1298 OverrideDataType(input1, dataType),
1299 OverrideDataType(output, dataType),
1300 reason);
kevmay0190539692018-11-29 08:40:19 +00001301 break;
1302 }
Matteo Martincigh0e406ee2019-06-12 15:42:18 +01001303 case LayerType::Prelu:
1304 {
1305 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1306 const TensorInfo& alpha = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
1307 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001308 result = layerSupportObject.IsPreluSupported(OverrideDataType(input, dataType),
1309 OverrideDataType(alpha, dataType),
1310 OverrideDataType(output, dataType),
1311 reason);
Matteo Martincigh0e406ee2019-06-12 15:42:18 +01001312 break;
1313 }
Mike Kellyc9ea45a2020-02-28 18:11:58 +00001314 case LayerType::Transpose:
1315 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001316 auto cLayer = PolymorphicDowncast<const TransposeLayer*>(&layer);
Mike Kellyc9ea45a2020-02-28 18:11:58 +00001317 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1318 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001319 result = layerSupportObject.IsTransposeSupported(OverrideDataType(input, dataType),
1320 OverrideDataType(output, dataType),
1321 cLayer->GetParameters(),
1322 reason);
Mike Kellyc9ea45a2020-02-28 18:11:58 +00001323 break;
1324 }
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01001325 case LayerType::TransposeConvolution2d:
1326 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001327 auto cLayer = PolymorphicDowncast<const TransposeConvolution2dLayer*>(&layer);
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01001328
1329 const TensorInfo input = OverrideDataType(layer.GetInputSlot(0).GetConnection()->GetTensorInfo(),
1330 dataType);
1331 const TensorInfo output = OverrideDataType(layer.GetOutputSlot(0).GetTensorInfo(), dataType);
1332
1333 const TransposeConvolution2dDescriptor& descriptor = cLayer->GetParameters();
1334
1335 Optional<TensorInfo> biases;
1336 if (descriptor.m_BiasEnabled)
1337 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001338 ARMNN_ASSERT(cLayer->m_Bias.get() != nullptr);
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01001339 biases = OverrideDataType(cLayer->m_Bias->GetTensorInfo(),
1340 GetBiasTypeFromWeightsType(dataType));
1341 }
1342
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001343 ARMNN_ASSERT(cLayer->m_Weight.get() != nullptr);
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01001344 const TensorInfo weights = OverrideDataType(cLayer->m_Weight->GetTensorInfo(), dataType);
1345
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001346 result = layerSupportObject.IsTransposeConvolution2dSupported(input,
1347 output,
1348 descriptor,
1349 weights,
1350 biases,
1351 reason);
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01001352
1353 break;
1354 }
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +00001355 case LayerType::Reduce:
1356 {
1357 auto cLayer = PolymorphicDowncast<const ReduceLayer*>(&layer);
1358 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1359 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
1360
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001361 result = layerSupportObject.IsReduceSupported(OverrideDataType(input, dataType),
1362 OverrideDataType(output, dataType),
1363 cLayer->GetParameters(),
1364 reason);
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +00001365 break;
1366 }
Narumol Prangnawarat8ed39ae2021-07-15 16:16:25 +01001367 case LayerType::UnidirectionalSequenceLstm:
1368 {
1369 auto cLayer = PolymorphicDowncast<const UnidirectionalSequenceLstmLayer*>(&layer);
1370 const UnidirectionalSequenceLstmDescriptor& descriptor = cLayer->GetParameters();
1371
1372 // All inputs.
1373 const TensorInfo& input = OverrideDataType(layer.GetInputSlot(0).GetConnection()->GetTensorInfo(),
1374 dataType);
1375 const TensorInfo& outputStateIn = OverrideDataType(layer.GetInputSlot(1).GetConnection()->GetTensorInfo(),
1376 dataType);
1377 const TensorInfo& cellStateIn = OverrideDataType(layer.GetInputSlot(2).GetConnection()->GetTensorInfo(),
1378 dataType);
1379 // Outputs
Mike Kelly12994962022-04-21 11:57:09 +01001380 const TensorInfo& outputStateOut = OverrideDataType(layer.GetOutputSlot(0).GetTensorInfo(), dataType);
1381 const TensorInfo& cellStateOut = OverrideDataType(layer.GetOutputSlot(1).GetTensorInfo(), dataType);
1382 const TensorInfo& output = OverrideDataType(layer.GetOutputSlot(2).GetTensorInfo(), dataType);
Narumol Prangnawarat8ed39ae2021-07-15 16:16:25 +01001383
1384 // Basic parameters
1385 const TensorInfo& inputToForgetWeights
1386 = OverrideDataType(cLayer->m_BasicParameters.m_InputToForgetWeights->GetTensorInfo(), dataType);
1387 const TensorInfo& inputToCellWeights
1388 = OverrideDataType(cLayer->m_BasicParameters.m_InputToCellWeights->GetTensorInfo(), dataType);
1389 const TensorInfo& inputToOutputWeights
1390 = OverrideDataType(cLayer->m_BasicParameters.m_InputToOutputWeights->GetTensorInfo(), dataType);
1391 const TensorInfo& recurrentToForgetWeights
1392 = OverrideDataType(cLayer->m_BasicParameters.m_RecurrentToForgetWeights->GetTensorInfo(), dataType);
1393 const TensorInfo& recurrentToCellWeights
1394 = OverrideDataType(cLayer->m_BasicParameters.m_RecurrentToCellWeights->GetTensorInfo(), dataType);
1395 const TensorInfo& recurrentToOutputWeights
1396 = OverrideDataType(cLayer->m_BasicParameters.m_RecurrentToOutputWeights->GetTensorInfo(), dataType);
1397 const TensorInfo& forgetGateBias
1398 = OverrideDataType(cLayer->m_BasicParameters.m_ForgetGateBias->GetTensorInfo(), dataType);
1399 const TensorInfo& cellBias
1400 = OverrideDataType(cLayer->m_BasicParameters.m_CellBias->GetTensorInfo(), dataType);
1401 const TensorInfo& outputGateBias
1402 = OverrideDataType(cLayer->m_BasicParameters.m_OutputGateBias->GetTensorInfo(), dataType);
1403
1404 LstmInputParamsInfo paramsInfo;
1405
1406 paramsInfo.m_InputToForgetWeights = &inputToForgetWeights;
1407 paramsInfo.m_InputToCellWeights = &inputToCellWeights;
1408 paramsInfo.m_InputToOutputWeights = &inputToOutputWeights;
1409 paramsInfo.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
1410 paramsInfo.m_RecurrentToCellWeights = &recurrentToCellWeights;
1411 paramsInfo.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
1412 paramsInfo.m_ForgetGateBias = &forgetGateBias;
1413 paramsInfo.m_CellBias = &cellBias;
1414 paramsInfo.m_OutputGateBias = &outputGateBias;
1415
1416 // Optional parameters
1417 TensorInfo optInputToInputWeights;
1418 TensorInfo optRecurrentToInputWeights;
1419 TensorInfo optCellToInputWeights;
1420 TensorInfo optInputGateBias;
1421 TensorInfo optProjectionWeights;
1422 TensorInfo optProjectionBias;
1423 TensorInfo optCellToForgetWeights;
1424 TensorInfo optCellToOutputWeights;
1425 TensorInfo optInputLayerNormWeights;
1426 TensorInfo optForgetLayerNormWeights;
1427 TensorInfo optCellLayerNormWeights;
1428 TensorInfo optOutputLayerNormWeights;
1429
1430 if(!descriptor.m_CifgEnabled)
1431 {
1432 optInputToInputWeights =
1433 OverrideDataType(cLayer->m_CifgParameters.m_InputToInputWeights->GetTensorInfo(), dataType);
1434 paramsInfo.m_InputToInputWeights = &optInputToInputWeights;
1435
1436 optRecurrentToInputWeights =
1437 OverrideDataType(cLayer->m_CifgParameters.m_RecurrentToInputWeights->GetTensorInfo(), dataType);
1438 paramsInfo.m_RecurrentToInputWeights = &optRecurrentToInputWeights;
1439 optInputGateBias =
1440 OverrideDataType(cLayer->m_CifgParameters.m_InputGateBias->GetTensorInfo(), dataType);
1441 paramsInfo.m_InputGateBias = &optInputGateBias;
1442 }
1443
1444 if(descriptor.m_ProjectionEnabled)
1445 {
1446 optProjectionWeights =
1447 OverrideDataType(cLayer->m_ProjectionParameters.m_ProjectionWeights->GetTensorInfo(), dataType);
1448 paramsInfo.m_ProjectionWeights = &optProjectionWeights;
1449 if (cLayer->m_ProjectionParameters.m_ProjectionBias != nullptr)
1450 {
1451 optProjectionBias =
1452 OverrideDataType(cLayer->m_ProjectionParameters.m_ProjectionBias->GetTensorInfo(), dataType);
1453 paramsInfo.m_ProjectionBias = &optProjectionBias;
1454 }
1455 }
1456
1457 if(descriptor.m_PeepholeEnabled)
1458 {
1459 if(!descriptor.m_CifgEnabled)
1460 {
1461 optCellToInputWeights =
1462 OverrideDataType(cLayer->m_PeepholeParameters.m_CellToInputWeights->GetTensorInfo(),
1463 dataType);
1464 paramsInfo.m_CellToInputWeights = &optCellToInputWeights;
1465 }
1466 optCellToForgetWeights =
1467 OverrideDataType(cLayer->m_PeepholeParameters.m_CellToForgetWeights->GetTensorInfo(), dataType);
1468 paramsInfo.m_CellToForgetWeights = &optCellToForgetWeights;
1469 optCellToOutputWeights =
1470 OverrideDataType(cLayer->m_PeepholeParameters.m_CellToOutputWeights->GetTensorInfo(), dataType);
1471 paramsInfo.m_CellToOutputWeights = &optCellToOutputWeights;
1472 }
1473
1474 if(descriptor.m_LayerNormEnabled)
1475 {
1476 if (!descriptor.m_CifgEnabled)
1477 {
1478 optInputLayerNormWeights = OverrideDataType(
1479 cLayer->m_LayerNormParameters.m_InputLayerNormWeights->GetTensorInfo(), dataType);
1480 paramsInfo.m_InputLayerNormWeights = &optInputLayerNormWeights;
1481 }
1482
1483 optForgetLayerNormWeights = OverrideDataType(
1484 cLayer->m_LayerNormParameters.m_ForgetLayerNormWeights->GetTensorInfo(), dataType);
1485 paramsInfo.m_ForgetLayerNormWeights = &optForgetLayerNormWeights;
1486
1487 optCellLayerNormWeights = OverrideDataType(
1488 cLayer->m_LayerNormParameters.m_CellLayerNormWeights->GetTensorInfo(), dataType);
1489 paramsInfo.m_CellLayerNormWeights = &optCellLayerNormWeights;
1490
1491 optOutputLayerNormWeights = OverrideDataType(
1492 cLayer->m_LayerNormParameters.m_OutputLayerNormWeights->GetTensorInfo(), dataType);
1493 paramsInfo.m_OutputLayerNormWeights = &optOutputLayerNormWeights;
1494 }
1495
Narumol Prangnawarat8ed39ae2021-07-15 16:16:25 +01001496 result = layerSupportObject.IsUnidirectionalSequenceLstmSupported(input,
1497 outputStateIn,
1498 cellStateIn,
Mike Kelly12994962022-04-21 11:57:09 +01001499 outputStateOut,
Narumol Prangnawarat8ed39ae2021-07-15 16:16:25 +01001500 cellStateOut,
Mike Kelly12994962022-04-21 11:57:09 +01001501 output,
Narumol Prangnawarat8ed39ae2021-07-15 16:16:25 +01001502 descriptor,
1503 paramsInfo,
1504 reason);
1505 break;
1506 }
telsoa014fcda012018-03-09 14:13:49 +00001507 default:
1508 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001509 ARMNN_ASSERT_MSG(false, "WorkloadFactory did not recognise type of layer.");
David Beck33f0ae02018-10-18 15:13:56 +01001510 reason.value() = "Unrecognised layer type";
telsoa014fcda012018-03-09 14:13:49 +00001511 result = false;
1512 break;
1513 }
1514 }
telsoa014fcda012018-03-09 14:13:49 +00001515 return result;
1516}
1517
/// Queries whether the given layer configuration is supported on a specific backend.
///
/// @param backendId             Backend to query (overrides the layer's assigned backend).
/// @param connectableLayer      Layer whose configuration is being checked.
/// @param dataType              Optional data type used to override the layer's tensor data types.
/// @param outReasonIfUnsupported Populated with a human-readable reason when false is returned.
/// @return true if the backend reports the layer as supported.
bool IWorkloadFactory::IsLayerSupported(const BackendId& backendId,
                                        const IConnectableLayer& connectableLayer,
                                        Optional<DataType> dataType,
                                        std::string& outReasonIfUnsupported)
{
    // Thin forwarder: all the per-layer-type logic lives in IsLayerConfigurationSupported.
    return IsLayerConfigurationSupported(backendId, connectableLayer, dataType, outReasonIfUnsupported);
}
1525
David Beckdcb751f2018-10-03 11:42:42 +01001526bool IWorkloadFactory::IsLayerSupported(const IConnectableLayer& connectableLayer,
David Beck29c75de2018-10-23 13:35:58 +01001527 Optional<DataType> dataType,
telsoa01c577f2c2018-08-31 09:22:23 +01001528 std::string& outReasonIfUnsupported)
telsoa014fcda012018-03-09 14:13:49 +00001529{
Jan Eilersbb446e52020-04-02 13:56:54 +01001530 auto layer = PolymorphicDowncast<const Layer*>(&connectableLayer);
Sadik Armagan045f6be2020-09-10 13:37:32 +01001531 return IsLayerConfigurationSupported(layer->GetBackendId(), connectableLayer, dataType, outReasonIfUnsupported);
1532}
1533
1534// TODO merge with defaulted modelOptions above
1535bool IWorkloadFactory::IsLayerSupported(const IConnectableLayer& connectableLayer,
1536 Optional<DataType> dataType,
1537 std::string& outReasonIfUnsupported,
1538 const ModelOptions& modelOptions)
1539{
1540 auto layer = PolymorphicDowncast<const Layer*>(&connectableLayer);
1541 return IsLayerConfigurationSupported(layer->GetBackendId(),
1542 connectableLayer,
1543 dataType,
1544 outReasonIfUnsupported,
1545 modelOptions);
telsoa014fcda012018-03-09 14:13:49 +00001546}
1547
/// Queries whether the given layer configuration is supported on a specific backend,
/// taking backend-specific ModelOptions into account.
///
/// @param backendId             Backend to query (overrides the layer's assigned backend).
/// @param connectableLayer      Layer whose configuration is being checked.
/// @param dataType              Optional data type used to override the layer's tensor data types.
/// @param outReasonIfUnsupported Populated with a human-readable reason when false is returned.
/// @param modelOptions          Backend-specific model options forwarded to the support query.
/// @return true if the backend reports the layer as supported.
bool IWorkloadFactory::IsLayerSupported(const BackendId& backendId,
                                        const IConnectableLayer& connectableLayer,
                                        Optional<DataType> dataType,
                                        std::string& outReasonIfUnsupported,
                                        const ModelOptions& modelOptions)
{
    // Thin forwarder: all the per-layer-type logic lives in IsLayerConfigurationSupported.
    return IsLayerConfigurationSupported(backendId,
                                         connectableLayer,
                                         dataType,
                                         outReasonIfUnsupported,
                                         modelOptions);
}
// The per-layer Create* virtuals called below are deprecated; suppress the
// deprecation warnings for the body of this dispatch shim.
ARMNN_NO_DEPRECATE_WARN_BEGIN
/// Dispatch shim mapping a LayerType to the corresponding (deprecated)
/// per-layer Create* factory virtual.
///
/// Each case downcasts the generic QueueDescriptor to the concrete descriptor
/// type for that layer and forwards it, together with the WorkloadInfo, to the
/// matching Create* method.
///
/// @param type       Layer type selecting which Create* virtual to invoke.
/// @param descriptor Generic queue descriptor; must actually be the concrete
///                   descriptor type matching @p type (unchecked downcast).
/// @param info       Workload info (input/output tensor infos) forwarded unchanged.
/// @return The workload created by the selected Create* virtual, or nullptr
///         for a layer type with no mapping here.
std::unique_ptr<IWorkload> IWorkloadFactory::CreateWorkload(LayerType type,
                                                            const QueueDescriptor& descriptor,
                                                            const WorkloadInfo& info) const
{
    switch(type)
    {
        case LayerType::Activation :
        {
            auto activationQueueDescriptor = PolymorphicDowncast<const ActivationQueueDescriptor*>(&descriptor);
            return CreateActivation(*activationQueueDescriptor, info);
        }
        case LayerType::Addition :
        {
            auto additionQueueDescriptor = PolymorphicDowncast<const AdditionQueueDescriptor*>(&descriptor);
            return CreateAddition(*additionQueueDescriptor, info);
        }
        case LayerType::ArgMinMax :
        {
            auto argMinMaxQueueDescriptor = PolymorphicDowncast<const ArgMinMaxQueueDescriptor*>(&descriptor);
            return CreateArgMinMax(*argMinMaxQueueDescriptor, info);
        }
        case LayerType::BatchNormalization :
        {
            auto batchNormQueueDescriptor = PolymorphicDowncast<const BatchNormalizationQueueDescriptor*>(&descriptor);
            return CreateBatchNormalization(*batchNormQueueDescriptor, info);
        }
        case LayerType::BatchToSpaceNd :
        {
            auto batchToSpaceNdQueueDescriptor
                    = PolymorphicDowncast<const BatchToSpaceNdQueueDescriptor*>(&descriptor);
            return CreateBatchToSpaceNd(*batchToSpaceNdQueueDescriptor, info);
        }
        case LayerType::Cast :
        {
            auto castQueueDescriptor = PolymorphicDowncast<const CastQueueDescriptor*>(&descriptor);
            return CreateCast(*castQueueDescriptor, info);
        }
        case LayerType::ChannelShuffle :
        {
            auto channelShuffleQueueDescriptor
                    = PolymorphicDowncast<const ChannelShuffleQueueDescriptor*>(&descriptor);
            return CreateChannelShuffle(*channelShuffleQueueDescriptor, info);
        }
        case LayerType::Comparison :
        {
            auto comparisonQueueDescriptor = PolymorphicDowncast<const ComparisonQueueDescriptor*>(&descriptor);
            return CreateComparison(*comparisonQueueDescriptor, info);
        }
        case LayerType::Concat :
        {
            auto concatQueueDescriptor = PolymorphicDowncast<const ConcatQueueDescriptor*>(&descriptor);
            return CreateConcat(*concatQueueDescriptor, info);
        }
        case LayerType::Constant :
        {
            auto constantQueueDescriptor = PolymorphicDowncast<const ConstantQueueDescriptor*>(&descriptor);
            return CreateConstant(*constantQueueDescriptor, info);
        }
        case LayerType::ConvertFp16ToFp32:
        {
            auto convertFp16ToFp32QueueDescriptor
                    = PolymorphicDowncast<const ConvertFp16ToFp32QueueDescriptor*>(&descriptor);
            return CreateConvertFp16ToFp32(*convertFp16ToFp32QueueDescriptor, info);
        }
        case LayerType::ConvertFp32ToFp16:
        {
            auto convertFp32ToFp16QueueDescriptor
                    = PolymorphicDowncast<const ConvertFp32ToFp16QueueDescriptor*>(&descriptor);
            return CreateConvertFp32ToFp16(*convertFp32ToFp16QueueDescriptor, info);
        }
        case LayerType::Convolution2d:
        {
            auto convolution2dQueueDescriptor = PolymorphicDowncast<const Convolution2dQueueDescriptor*>(&descriptor);
            return CreateConvolution2d(*convolution2dQueueDescriptor, info);
        }
        case LayerType::Convolution3d:
        {
            auto convolution3dQueueDescriptor = PolymorphicDowncast<const Convolution3dQueueDescriptor*>(&descriptor);
            return CreateConvolution3d(*convolution3dQueueDescriptor, info);
        }
        case LayerType::Debug:
        {
            auto debugQueueDescriptor = PolymorphicDowncast<const DebugQueueDescriptor*>(&descriptor);
            return CreateDebug(*debugQueueDescriptor, info);
        }
        case LayerType::DepthToSpace:
        {
            auto depthToSpaceQueueDescriptor = PolymorphicDowncast<const DepthToSpaceQueueDescriptor*>(&descriptor);
            return CreateDepthToSpace(*depthToSpaceQueueDescriptor, info);
        }
        case LayerType::DepthwiseConvolution2d:
        {
            auto depthwiseConvolution2DQueueDescriptor
                    = PolymorphicDowncast<const DepthwiseConvolution2dQueueDescriptor*>(&descriptor);
            return CreateDepthwiseConvolution2d(*depthwiseConvolution2DQueueDescriptor, info);
        }
        case LayerType::Dequantize:
        {
            auto dequantizeQueueDescriptor = PolymorphicDowncast<const DequantizeQueueDescriptor*>(&descriptor);
            return CreateDequantize(*dequantizeQueueDescriptor, info);
        }
        case LayerType::DetectionPostProcess:
        {
            auto detectionPostProcessQueueDescriptor
                    = PolymorphicDowncast<const DetectionPostProcessQueueDescriptor*>(&descriptor);
            return CreateDetectionPostProcess(*detectionPostProcessQueueDescriptor, info);
        }
        case LayerType::Division:
        {
            auto divisionQueueDescriptor = PolymorphicDowncast<const DivisionQueueDescriptor*>(&descriptor);
            return CreateDivision(*divisionQueueDescriptor, info);
        }
        case LayerType::ElementwiseUnary:
        {
            auto elementwiseUnaryQueueDescriptor
                    = PolymorphicDowncast<const ElementwiseUnaryQueueDescriptor*>(&descriptor);
            return CreateElementwiseUnary(*elementwiseUnaryQueueDescriptor, info);

        }
        case LayerType::FakeQuantization:
        {
            auto fakeQuantizationQueueDescriptor
                    = PolymorphicDowncast<const FakeQuantizationQueueDescriptor*>(&descriptor);
            return CreateFakeQuantization(*fakeQuantizationQueueDescriptor, info);
        }
        case LayerType::Fill:
        {
            auto fillQueueDescriptor = PolymorphicDowncast<const FillQueueDescriptor*>(&descriptor);
            return CreateFill(*fillQueueDescriptor, info);
        }
        case LayerType::Floor:
        {
            auto floorQueueDescriptor = PolymorphicDowncast<const FloorQueueDescriptor*>(&descriptor);
            return CreateFloor(*floorQueueDescriptor, info);
        }
        case LayerType::FullyConnected:
        {
            auto fullyConnectedQueueDescriptor
                    = PolymorphicDowncast<const FullyConnectedQueueDescriptor*>(&descriptor);
            return CreateFullyConnected(*fullyConnectedQueueDescriptor, info);
        }
        case LayerType::Gather:
        {
            auto gatherQueueDescriptor = PolymorphicDowncast<const GatherQueueDescriptor*>(&descriptor);
            return CreateGather(*gatherQueueDescriptor, info);
        }
        case LayerType::Input:
        {
            auto inputQueueDescriptor = PolymorphicDowncast<const InputQueueDescriptor*>(&descriptor);
            return CreateInput(*inputQueueDescriptor, info);
        }
        case LayerType::InstanceNormalization:
        {
            auto instanceNormalizationQueueDescriptor
                    = PolymorphicDowncast<const InstanceNormalizationQueueDescriptor*>(&descriptor);
            return CreateInstanceNormalization(*instanceNormalizationQueueDescriptor, info);
        }
        case LayerType::L2Normalization:
        {
            auto l2NormalizationQueueDescriptor
                    = PolymorphicDowncast<const L2NormalizationQueueDescriptor*>(&descriptor);
            return CreateL2Normalization(*l2NormalizationQueueDescriptor, info);
        }
        case LayerType::LogicalBinary:
        {
            auto logicalBinaryQueueDescriptor = PolymorphicDowncast<const LogicalBinaryQueueDescriptor*>(&descriptor);
            return CreateLogicalBinary(*logicalBinaryQueueDescriptor, info);
        }
        case LayerType::LogSoftmax:
        {
            auto logSoftmaxQueueDescriptor = PolymorphicDowncast<const LogSoftmaxQueueDescriptor*>(&descriptor);
            return CreateLogSoftmax(*logSoftmaxQueueDescriptor, info);
        }
        case LayerType::Lstm:
        {
            auto lstmQueueDescriptor = PolymorphicDowncast<const LstmQueueDescriptor*>(&descriptor);
            return CreateLstm(*lstmQueueDescriptor, info);
        }
        case LayerType::Maximum:
        {
            auto maximumQueueDescriptor = PolymorphicDowncast<const MaximumQueueDescriptor*>(&descriptor);
            return CreateMaximum(*maximumQueueDescriptor, info);
        }
        case LayerType::Mean:
        {
            auto meanQueueDescriptor = PolymorphicDowncast<const MeanQueueDescriptor*>(&descriptor);
            return CreateMean(*meanQueueDescriptor, info);
        }
        case LayerType::MemCopy:
        {
            auto memCopyQueueDescriptor = PolymorphicDowncast<const MemCopyQueueDescriptor*>(&descriptor);
            return CreateMemCopy(*memCopyQueueDescriptor, info);
        }
        case LayerType::MemImport:
        {
            auto memImportQueueDescriptor = PolymorphicDowncast<const MemImportQueueDescriptor*>(&descriptor);
            return CreateMemImport(*memImportQueueDescriptor, info);
        }
        case LayerType::Minimum:
        {
            auto minimumQueueDescriptor = PolymorphicDowncast<const MinimumQueueDescriptor*>(&descriptor);
            return CreateMinimum(*minimumQueueDescriptor, info);
        }
        case LayerType::Multiplication:
        {
            auto multiplicationQueueDescriptor
                    = PolymorphicDowncast<const MultiplicationQueueDescriptor*>(&descriptor);
            return CreateMultiplication(*multiplicationQueueDescriptor, info);
        }
        case LayerType::Normalization:
        {
            auto normalizationQueueDescriptor = PolymorphicDowncast<const NormalizationQueueDescriptor*>(&descriptor);
            return CreateNormalization(*normalizationQueueDescriptor, info);
        }
        case LayerType::Output:
        {
            auto outputQueueDescriptor = PolymorphicDowncast<const OutputQueueDescriptor*>(&descriptor);
            return CreateOutput(*outputQueueDescriptor, info);
        }
        case LayerType::Pad:
        {
            auto padQueueDescriptor = PolymorphicDowncast<const PadQueueDescriptor*>(&descriptor);
            return CreatePad(*padQueueDescriptor, info);
        }
        case LayerType::Permute:
        {
            auto permuteQueueDescriptor = PolymorphicDowncast<const PermuteQueueDescriptor*>(&descriptor);
            return CreatePermute(*permuteQueueDescriptor, info);
        }
        case LayerType::Pooling2d:
        {
            auto pooling2dQueueDescriptor = PolymorphicDowncast<const Pooling2dQueueDescriptor*>(&descriptor);
            return CreatePooling2d(*pooling2dQueueDescriptor, info);
        }
        case LayerType::Pooling3d:
        {
            auto pooling3dQueueDescriptor = PolymorphicDowncast<const Pooling3dQueueDescriptor*>(&descriptor);
            return CreatePooling3d(*pooling3dQueueDescriptor, info);
        }
        case LayerType::PreCompiled:
        {
            auto preCompiledQueueDescriptor = PolymorphicDowncast<const PreCompiledQueueDescriptor*>(&descriptor);
            return CreatePreCompiled(*preCompiledQueueDescriptor, info);
        }
        case LayerType::Prelu:
        {
            auto preluQueueDescriptor = PolymorphicDowncast<const PreluQueueDescriptor*>(&descriptor);
            return CreatePrelu(*preluQueueDescriptor, info);
        }
        case LayerType::QLstm:
        {
            auto qlstmQueueDescriptor = PolymorphicDowncast<const QLstmQueueDescriptor*>(&descriptor);
            return CreateQLstm(*qlstmQueueDescriptor, info);
        }
        case LayerType::Quantize:
        {
            auto quantizeQueueDescriptor = PolymorphicDowncast<const QuantizeQueueDescriptor*>(&descriptor);
            return CreateQuantize(*quantizeQueueDescriptor, info);
        }
        case LayerType::Rank:
        {
            auto rankQueueDescriptor = PolymorphicDowncast<const RankQueueDescriptor*>(&descriptor);
            return CreateRank(*rankQueueDescriptor, info);
        }
        case LayerType::Reduce:
        {
            auto reduceQueueDescriptor = PolymorphicDowncast<const ReduceQueueDescriptor*>(&descriptor);
            return CreateReduce(*reduceQueueDescriptor, info);
        }
        case LayerType::Reshape:
        {
            auto reshapeQueueDescriptor = PolymorphicDowncast<const ReshapeQueueDescriptor*>(&descriptor);
            return CreateReshape(*reshapeQueueDescriptor, info);
        }
        case LayerType::Resize:
        {
            auto resizeQueueDescriptor = PolymorphicDowncast<const ResizeQueueDescriptor*>(&descriptor);
            return CreateResize(*resizeQueueDescriptor, info);
        }
        case LayerType::Shape:
        {
            auto shapeQueueDescriptor = PolymorphicDowncast<const ShapeQueueDescriptor*>(&descriptor);
            return CreateShape(*shapeQueueDescriptor, info);
        }
        case LayerType::Slice:
        {
            auto sliceQueueDescriptor = PolymorphicDowncast<const SliceQueueDescriptor*>(&descriptor);
            return CreateSlice(*sliceQueueDescriptor, info);
        }
        case LayerType::Softmax:
        {
            auto softmaxQueueDescriptor = PolymorphicDowncast<const SoftmaxQueueDescriptor*>(&descriptor);
            return CreateSoftmax(*softmaxQueueDescriptor, info);
        }
        case LayerType::SpaceToBatchNd:
        {
            auto spaceToBatchNdQueueDescriptor
                    = PolymorphicDowncast<const SpaceToBatchNdQueueDescriptor*>(&descriptor);
            return CreateSpaceToBatchNd(*spaceToBatchNdQueueDescriptor, info);
        }
        case LayerType::SpaceToDepth:
        {
            auto spaceToDepthQueueDescriptor = PolymorphicDowncast<const SpaceToDepthQueueDescriptor*>(&descriptor);
            return CreateSpaceToDepth(*spaceToDepthQueueDescriptor, info);
        }
        case LayerType::Splitter:
        {
            auto splitterQueueDescriptor = PolymorphicDowncast<const SplitterQueueDescriptor*>(&descriptor);
            return CreateSplitter(*splitterQueueDescriptor, info);
        }
        case LayerType::Stack:
        {
            auto stackQueueDescriptor = PolymorphicDowncast<const StackQueueDescriptor*>(&descriptor);
            return CreateStack(*stackQueueDescriptor, info);
        }
        case LayerType::StridedSlice:
        {
            auto stridedSliceQueueDescriptor = PolymorphicDowncast<const StridedSliceQueueDescriptor*>(&descriptor);
            return CreateStridedSlice(*stridedSliceQueueDescriptor, info);
        }
        case LayerType::Subtraction:
        {
            auto subtractionQueueDescriptor = PolymorphicDowncast<const SubtractionQueueDescriptor*>(&descriptor);
            return CreateSubtraction(*subtractionQueueDescriptor, info);
        }
        case LayerType::Transpose:
        {
            auto transposeQueueDescriptor = PolymorphicDowncast<const TransposeQueueDescriptor*>(&descriptor);
            return CreateTranspose(*transposeQueueDescriptor, info);
        }
        case LayerType::TransposeConvolution2d:
        {
            auto transposeConvolution2dQueueDescriptor
                    = PolymorphicDowncast<const TransposeConvolution2dQueueDescriptor*>(&descriptor);
            return CreateTransposeConvolution2d(*transposeConvolution2dQueueDescriptor, info);
        }
        case LayerType::UnidirectionalSequenceLstm:
        {
            auto unidirectionalSequenceLstmQueueDescriptor
                    = PolymorphicDowncast<const UnidirectionalSequenceLstmQueueDescriptor*>(&descriptor);
            return CreateUnidirectionalSequenceLstm(*unidirectionalSequenceLstmQueueDescriptor, info);
        }
        default:
            // No Create* mapping for this layer type; callers must handle a null workload.
            return nullptr;
    }
}
ARMNN_NO_DEPRECATE_WARN_END
Sadik Armagan04a72972020-09-14 15:44:18 +01001908
Derek Lamberti901ea112019-12-10 22:07:09 +00001909std::unique_ptr<IWorkload> IWorkloadFactory::CreateActivation(const ActivationQueueDescriptor& /*descriptor*/,
1910 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001911{
1912 return std::unique_ptr<IWorkload>();
1913}
1914
Derek Lamberti901ea112019-12-10 22:07:09 +00001915std::unique_ptr<IWorkload> IWorkloadFactory::CreateAddition(const AdditionQueueDescriptor& /*descriptor*/,
1916 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001917{
1918 return std::unique_ptr<IWorkload>();
1919}
1920
Derek Lamberti901ea112019-12-10 22:07:09 +00001921std::unique_ptr<IWorkload> IWorkloadFactory::CreateArgMinMax(const ArgMinMaxQueueDescriptor& /*descriptor*/,
1922 const WorkloadInfo& /*info*/) const
Nikhil Rajee391d52019-09-05 17:50:44 +01001923{
1924 return std::unique_ptr<IWorkload>();
1925}
1926
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001927std::unique_ptr<IWorkload> IWorkloadFactory::CreateBatchNormalization(
Derek Lamberti901ea112019-12-10 22:07:09 +00001928 const BatchNormalizationQueueDescriptor& /*descriptor*/, const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001929{
1930 return std::unique_ptr<IWorkload>();
1931}
1932
Derek Lamberti901ea112019-12-10 22:07:09 +00001933std::unique_ptr<IWorkload> IWorkloadFactory::CreateBatchToSpaceNd(const BatchToSpaceNdQueueDescriptor& /*desc*/,
1934 const WorkloadInfo& /*Info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001935{
1936 return std::unique_ptr<IWorkload>();
1937}
1938
mathad01b392e982021-04-07 12:07:30 +01001939std::unique_ptr<IWorkload> IWorkloadFactory::CreateCast(const CastQueueDescriptor& /*descriptor*/,
1940 const WorkloadInfo& /*info*/) const
1941{
1942 return std::unique_ptr<IWorkload>();
1943}
1944
Simon Obute51f67772021-09-03 15:50:13 +01001945std::unique_ptr<IWorkload> IWorkloadFactory::CreateChannelShuffle(const ChannelShuffleQueueDescriptor& /*descriptor*/,
1946 const WorkloadInfo& /*info*/) const
1947{
1948 return std::unique_ptr<IWorkload>();
1949}
1950
Derek Lamberti901ea112019-12-10 22:07:09 +00001951std::unique_ptr<IWorkload> IWorkloadFactory::CreateComparison(const ComparisonQueueDescriptor& /*descriptor*/,
1952 const WorkloadInfo& /*info*/) const
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001953{
1954 return std::unique_ptr<IWorkload>();
1955}
1956
Derek Lamberti901ea112019-12-10 22:07:09 +00001957std::unique_ptr<IWorkload> IWorkloadFactory::CreateConcat(const ConcatQueueDescriptor& /*descriptor*/,
1958 const WorkloadInfo& /*info*/) const
Jim Flynn4ed6c832019-05-20 11:02:46 +01001959{
1960 return std::unique_ptr<IWorkload>();
1961}
1962
Derek Lamberti901ea112019-12-10 22:07:09 +00001963std::unique_ptr<IWorkload> IWorkloadFactory::CreateConstant(const ConstantQueueDescriptor& /*descriptor*/,
1964 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001965{
1966 return std::unique_ptr<IWorkload>();
1967}
1968
Derek Lamberti901ea112019-12-10 22:07:09 +00001969std::unique_ptr<IWorkload> IWorkloadFactory::CreateConvertFp16ToFp32(const ConvertFp16ToFp32QueueDescriptor& /*desc*/,
1970 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001971{
1972 return std::unique_ptr<IWorkload>();
1973}
1974
Derek Lamberti901ea112019-12-10 22:07:09 +00001975std::unique_ptr<IWorkload> IWorkloadFactory::CreateConvertFp32ToFp16(const ConvertFp32ToFp16QueueDescriptor& /*desc*/,
1976 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001977{
1978 return std::unique_ptr<IWorkload>();
1979}
1980
Derek Lamberti901ea112019-12-10 22:07:09 +00001981std::unique_ptr<IWorkload> IWorkloadFactory::CreateConvolution2d(const Convolution2dQueueDescriptor& /*descriptor*/,
1982 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001983{
1984 return std::unique_ptr<IWorkload>();
1985}
1986
Matthew Sloyanb63a3112021-09-08 13:05:51 +01001987std::unique_ptr<IWorkload> IWorkloadFactory::CreateConvolution3d(const Convolution3dQueueDescriptor& /*descriptor*/,
1988 const WorkloadInfo& /*info*/) const
1989{
1990 return std::unique_ptr<IWorkload>();
1991}
1992
Derek Lamberti901ea112019-12-10 22:07:09 +00001993std::unique_ptr<IWorkload> IWorkloadFactory::CreateDebug(const DebugQueueDescriptor& /*descriptor*/,
1994 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001995{
1996 return std::unique_ptr<IWorkload>();
1997}
1998
Derek Lamberti901ea112019-12-10 22:07:09 +00001999std::unique_ptr<IWorkload> IWorkloadFactory::CreateDepthToSpace(const DepthToSpaceQueueDescriptor& /*descriptor*/,
2000 const WorkloadInfo& /*info*/) const
Aron Virginas-Tardd6247f2019-09-19 14:31:17 +01002001{
2002 return std::unique_ptr<IWorkload>();
2003}
2004
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002005std::unique_ptr<IWorkload> IWorkloadFactory::CreateDepthwiseConvolution2d(
Derek Lamberti901ea112019-12-10 22:07:09 +00002006 const DepthwiseConvolution2dQueueDescriptor& /*descriptor*/, const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002007{
2008 return std::unique_ptr<IWorkload>();
2009}
2010
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00002011std::unique_ptr<IWorkload> IWorkloadFactory::CreateDequantize(
Derek Lamberti901ea112019-12-10 22:07:09 +00002012 const DequantizeQueueDescriptor& /*descriptor*/, const WorkloadInfo& /*info*/) const
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00002013{
2014 return std::unique_ptr<IWorkload>();
2015}
2016
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002017std::unique_ptr<IWorkload> IWorkloadFactory::CreateDetectionPostProcess(
Derek Lamberti901ea112019-12-10 22:07:09 +00002018 const DetectionPostProcessQueueDescriptor& /*descriptor*/, const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002019{
2020 return std::unique_ptr<IWorkload>();
2021}
2022
Derek Lamberti901ea112019-12-10 22:07:09 +00002023std::unique_ptr<IWorkload> IWorkloadFactory::CreateDivision(const DivisionQueueDescriptor& /*descriptor*/,
2024 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002025{
2026 return std::unique_ptr<IWorkload>();
2027}
2028
josh minor4a3c6102020-01-06 16:40:46 -06002029std::unique_ptr<IWorkload> IWorkloadFactory::CreateElementwiseUnary(const ElementwiseUnaryQueueDescriptor& /*desc*/,
2030 const WorkloadInfo& /*info*/) const
2031{
2032 return std::unique_ptr<IWorkload>();
2033}
2034
Derek Lamberti901ea112019-12-10 22:07:09 +00002035std::unique_ptr<IWorkload> IWorkloadFactory::CreateFakeQuantization(const FakeQuantizationQueueDescriptor& /*desc*/,
2036 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002037{
2038 return std::unique_ptr<IWorkload>();
2039}
2040
Ryan OSheaec6c6802020-06-05 17:17:06 +01002041std::unique_ptr<IWorkload> IWorkloadFactory::CreateFill(const FillQueueDescriptor& /*descriptor*/,
2042 const WorkloadInfo& /*info*/) const
2043{
2044 return std::unique_ptr<IWorkload>();
2045}
2046
Derek Lamberti901ea112019-12-10 22:07:09 +00002047std::unique_ptr<IWorkload> IWorkloadFactory::CreateFloor(const FloorQueueDescriptor& /*descriptor*/,
2048 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002049{
2050 return std::unique_ptr<IWorkload>();
2051}
2052
Derek Lamberti901ea112019-12-10 22:07:09 +00002053std::unique_ptr<IWorkload> IWorkloadFactory::CreateFullyConnected(const FullyConnectedQueueDescriptor& /*descriptor*/,
2054 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002055{
2056 return std::unique_ptr<IWorkload>();
2057}
2058
Derek Lamberti901ea112019-12-10 22:07:09 +00002059std::unique_ptr<IWorkload> IWorkloadFactory::CreateGather(const GatherQueueDescriptor& /*descriptor*/,
2060 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002061{
2062 return std::unique_ptr<IWorkload>();
2063}
2064
Kevin Mayce5045a2019-10-02 14:07:47 +01002065std::unique_ptr<IWorkload> IWorkloadFactory::CreateInstanceNormalization(
Derek Lamberti901ea112019-12-10 22:07:09 +00002066 const InstanceNormalizationQueueDescriptor& /*descriptor*/,
2067 const WorkloadInfo& /*info*/) const
Kevin Mayce5045a2019-10-02 14:07:47 +01002068{
2069 return std::unique_ptr<IWorkload>();
2070}
2071
Derek Lamberti901ea112019-12-10 22:07:09 +00002072std::unique_ptr<IWorkload> IWorkloadFactory::CreateL2Normalization(const L2NormalizationQueueDescriptor& /*desc*/,
2073 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002074{
2075 return std::unique_ptr<IWorkload>();
2076}
2077
James Conroyaba90cd2020-11-06 16:28:18 +00002078std::unique_ptr<IWorkload> IWorkloadFactory::CreateLogicalBinary(const LogicalBinaryQueueDescriptor& /*desc*/,
2079 const WorkloadInfo& /*info*/) const
2080{
2081 return std::unique_ptr<IWorkload>();
2082}
2083
2084std::unique_ptr<IWorkload> IWorkloadFactory::CreateLogicalUnary(const ElementwiseUnaryQueueDescriptor& /*desc*/,
2085 const WorkloadInfo& /*info*/) const
2086{
2087 return std::unique_ptr<IWorkload>();
2088}
2089
Derek Lamberti901ea112019-12-10 22:07:09 +00002090std::unique_ptr<IWorkload> IWorkloadFactory::CreateLogSoftmax(const LogSoftmaxQueueDescriptor& /*descriptor*/,
2091 const WorkloadInfo& /*info*/) const
Aron Virginas-Tarf982dea2019-10-11 14:07:53 +01002092{
2093 return std::unique_ptr<IWorkload>();
2094}
2095
Derek Lamberti901ea112019-12-10 22:07:09 +00002096std::unique_ptr<IWorkload> IWorkloadFactory::CreateLstm(const LstmQueueDescriptor& /*descriptor*/,
2097 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002098{
2099 return std::unique_ptr<IWorkload>();
2100}
2101
Derek Lamberti901ea112019-12-10 22:07:09 +00002102std::unique_ptr<IWorkload> IWorkloadFactory::CreateMaximum(const MaximumQueueDescriptor& /*descriptor*/,
2103 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002104{
2105 return std::unique_ptr<IWorkload>();
2106}
2107
Derek Lamberti901ea112019-12-10 22:07:09 +00002108std::unique_ptr<IWorkload> IWorkloadFactory::CreateMean(const MeanQueueDescriptor& /*descriptor*/,
2109 const WorkloadInfo& /*Info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002110{
2111 return std::unique_ptr<IWorkload>();
2112}
2113
Derek Lamberti901ea112019-12-10 22:07:09 +00002114std::unique_ptr<IWorkload> IWorkloadFactory::CreateMemCopy(const MemCopyQueueDescriptor& /*descriptor*/,
2115 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002116{
2117 return std::unique_ptr<IWorkload>();
2118}
2119
Derek Lamberti901ea112019-12-10 22:07:09 +00002120std::unique_ptr<IWorkload> IWorkloadFactory::CreateMemImport(const MemImportQueueDescriptor& /*descriptor*/,
2121 const WorkloadInfo& /*info*/) const
Derek Lambertif674aa02019-08-01 15:56:25 +01002122{
2123 return std::unique_ptr<IWorkload>();
2124}
2125
Derek Lamberti901ea112019-12-10 22:07:09 +00002126std::unique_ptr<IWorkload> IWorkloadFactory::CreateMerge(const MergeQueueDescriptor& /*descriptor*/,
2127 const WorkloadInfo& /*info*/) const
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01002128{
2129 return std::unique_ptr<IWorkload>();
2130}
2131
Derek Lamberti901ea112019-12-10 22:07:09 +00002132std::unique_ptr<IWorkload> IWorkloadFactory::CreateMinimum(const MinimumQueueDescriptor& /*descriptor*/,
2133 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002134{
2135 return std::unique_ptr<IWorkload>();
2136}
2137
Derek Lamberti901ea112019-12-10 22:07:09 +00002138std::unique_ptr<IWorkload> IWorkloadFactory::CreateMultiplication(const MultiplicationQueueDescriptor& /*descriptor*/,
2139 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002140{
2141 return std::unique_ptr<IWorkload>();
2142}
2143
Derek Lamberti901ea112019-12-10 22:07:09 +00002144std::unique_ptr<IWorkload> IWorkloadFactory::CreateNormalization(const NormalizationQueueDescriptor& /*descriptor*/,
2145 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002146{
2147 return std::unique_ptr<IWorkload>();
2148}
2149
Derek Lamberti901ea112019-12-10 22:07:09 +00002150std::unique_ptr<IWorkload> IWorkloadFactory::CreateOutput(const OutputQueueDescriptor& /*descriptor*/,
2151 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002152{
2153 return std::unique_ptr<IWorkload>();
2154}
2155
Derek Lamberti901ea112019-12-10 22:07:09 +00002156std::unique_ptr<IWorkload> IWorkloadFactory::CreatePad(const PadQueueDescriptor& /*descriptor*/,
2157 const WorkloadInfo& /*Info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002158{
2159 return std::unique_ptr<IWorkload>();
2160}
2161
Derek Lamberti901ea112019-12-10 22:07:09 +00002162std::unique_ptr<IWorkload> IWorkloadFactory::CreatePermute(const PermuteQueueDescriptor& /*descriptor*/,
Mike Kellyc9ea45a2020-02-28 18:11:58 +00002163 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002164{
2165 return std::unique_ptr<IWorkload>();
2166}
2167
Derek Lamberti901ea112019-12-10 22:07:09 +00002168std::unique_ptr<IWorkload> IWorkloadFactory::CreatePooling2d(const Pooling2dQueueDescriptor& /*descriptor*/,
2169 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002170{
2171 return std::unique_ptr<IWorkload>();
2172}
2173
Tamás Nyíri7b885b32021-10-26 14:47:57 +01002174std::unique_ptr<IWorkload> IWorkloadFactory::CreatePooling3d(const Pooling3dQueueDescriptor& /*descriptor*/,
2175 const WorkloadInfo& /*info*/) const
2176{
2177 return std::unique_ptr<IWorkload>();
2178}
2179
Derek Lamberti901ea112019-12-10 22:07:09 +00002180std::unique_ptr<IWorkload> IWorkloadFactory::CreatePreCompiled(const PreCompiledQueueDescriptor& /*descriptor*/,
2181 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002182{
2183 return std::unique_ptr<IWorkload>();
2184}
2185
Derek Lamberti901ea112019-12-10 22:07:09 +00002186std::unique_ptr<IWorkload> IWorkloadFactory::CreatePrelu(const PreluQueueDescriptor &/*descriptor*/,
2187 const WorkloadInfo &/*info*/) const
Matteo Martincigh0e406ee2019-06-12 15:42:18 +01002188{
2189 return std::unique_ptr<IWorkload>();
2190}
2191
Derek Lamberti901ea112019-12-10 22:07:09 +00002192std::unique_ptr<IWorkload> IWorkloadFactory::CreateQuantize(const QuantizeQueueDescriptor& /*descriptor*/,
2193 const WorkloadInfo& /*Info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002194{
2195 return std::unique_ptr<IWorkload>();
2196}
2197
James Conroy586a9aa2020-03-20 08:49:33 +00002198std::unique_ptr<IWorkload> IWorkloadFactory::CreateQLstm(const QLstmQueueDescriptor& /*descriptor*/,
2199 const WorkloadInfo& /*info*/) const
2200{
2201 return std::unique_ptr<IWorkload>();
2202}
2203
Derek Lamberti901ea112019-12-10 22:07:09 +00002204std::unique_ptr<IWorkload> IWorkloadFactory::CreateQuantizedLstm(const QuantizedLstmQueueDescriptor& /*descriptor*/,
2205 const WorkloadInfo& /*info*/) const
James Conroyee18dc82019-07-17 11:27:46 +01002206{
2207 return std::unique_ptr<IWorkload>();
2208}
Finn Williams2605b232020-06-10 15:53:46 +01002209std::unique_ptr<IWorkload> IWorkloadFactory::CreateRank(const RankQueueDescriptor& /*descriptor*/,
2210 const WorkloadInfo& /*info*/) const
2211{
2212 return std::unique_ptr<IWorkload>();
2213}
James Conroyee18dc82019-07-17 11:27:46 +01002214
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +00002215std::unique_ptr<IWorkload> IWorkloadFactory::CreateReduce(const ReduceQueueDescriptor& /*descriptor*/,
2216 const WorkloadInfo& /*info*/) const
2217{
2218 return std::unique_ptr<IWorkload>();
2219}
2220
Derek Lamberti901ea112019-12-10 22:07:09 +00002221std::unique_ptr<IWorkload> IWorkloadFactory::CreateReshape(const ReshapeQueueDescriptor& /*descriptor*/,
2222 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002223{
2224 return std::unique_ptr<IWorkload>();
2225}
2226
Derek Lamberti901ea112019-12-10 22:07:09 +00002227std::unique_ptr<IWorkload> IWorkloadFactory::CreateResize(const ResizeQueueDescriptor& /*descriptor*/,
2228 const WorkloadInfo& /*info*/) const
Teresa Charlina9075df2019-06-27 15:41:57 +01002229{
2230 return std::unique_ptr<IWorkload>();
2231}
2232
Keith Davis3ae3f972021-05-21 16:33:48 +01002233std::unique_ptr<IWorkload> IWorkloadFactory::CreateShape(const ShapeQueueDescriptor& /*descriptor*/,
2234 const WorkloadInfo& /*info*/) const
2235{
2236 return std::unique_ptr<IWorkload>();
2237}
2238
Derek Lamberti901ea112019-12-10 22:07:09 +00002239std::unique_ptr<IWorkload> IWorkloadFactory::CreateSlice(const SliceQueueDescriptor& /*descriptor*/,
2240 const WorkloadInfo& /*info*/) const
2241{
2242 return std::unique_ptr<IWorkload>();
2243}
Mike Kellyc9ea45a2020-02-28 18:11:58 +00002244
Derek Lamberti901ea112019-12-10 22:07:09 +00002245std::unique_ptr<IWorkload> IWorkloadFactory::CreateSoftmax(const SoftmaxQueueDescriptor& /*descriptor*/,
2246 const WorkloadInfo& /*info*/) const
Aron Virginas-Tar636ab402019-09-16 14:27:45 +01002247{
2248 return std::unique_ptr<IWorkload>();
2249}
2250
Derek Lamberti901ea112019-12-10 22:07:09 +00002251std::unique_ptr<IWorkload> IWorkloadFactory::CreateSplitter(const SplitterQueueDescriptor& /*descriptor*/,
2252 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002253{
2254 return std::unique_ptr<IWorkload>();
2255}
2256
Derek Lamberti901ea112019-12-10 22:07:09 +00002257std::unique_ptr<IWorkload> IWorkloadFactory::CreateSpaceToBatchNd(const SpaceToBatchNdQueueDescriptor& /*descriptor*/,
2258 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002259{
2260 return std::unique_ptr<IWorkload>();
2261}
2262
Derek Lamberti901ea112019-12-10 22:07:09 +00002263std::unique_ptr<IWorkload> IWorkloadFactory::CreateSpaceToDepth(const SpaceToDepthQueueDescriptor& /*descriptor*/,
2264 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002265{
2266 return std::unique_ptr<IWorkload>();
2267}
2268
Derek Lamberti901ea112019-12-10 22:07:09 +00002269std::unique_ptr<IWorkload> IWorkloadFactory::CreateStack(const StackQueueDescriptor& /*descriptor*/,
2270 const WorkloadInfo& /*info*/) const
Aron Virginas-Tar972af152019-06-11 14:14:03 +01002271{
2272 return std::unique_ptr<IWorkload>();
2273}
2274
Derek Lamberti901ea112019-12-10 22:07:09 +00002275std::unique_ptr<IWorkload> IWorkloadFactory::CreateStridedSlice(const StridedSliceQueueDescriptor& /*descriptor*/,
2276 const WorkloadInfo& /*info*/) const
Matthew Jackson2b8c1da2019-07-04 14:59:16 +01002277{
2278 return std::unique_ptr<IWorkload>();
2279}
2280
Derek Lamberti901ea112019-12-10 22:07:09 +00002281std::unique_ptr<IWorkload> IWorkloadFactory::CreateSubtraction(const SubtractionQueueDescriptor& /*descriptor*/,
2282 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002283{
2284 return std::unique_ptr<IWorkload>();
2285}
2286
Derek Lamberti901ea112019-12-10 22:07:09 +00002287std::unique_ptr<IWorkload> IWorkloadFactory::CreateSwitch(const SwitchQueueDescriptor& /*descriptor*/,
2288 const WorkloadInfo& /*info*/) const
Sadik Armaganeff363d2019-04-05 15:25:46 +01002289{
2290 return std::unique_ptr<IWorkload>();
2291}
2292
Mike Kellyc9ea45a2020-02-28 18:11:58 +00002293std::unique_ptr<IWorkload> IWorkloadFactory::CreateTranspose(const TransposeQueueDescriptor& /*descriptor*/,
2294 const WorkloadInfo& /*info*/) const
2295{
2296 return std::unique_ptr<IWorkload>();
2297}
2298
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01002299std::unique_ptr<IWorkload> IWorkloadFactory::CreateTransposeConvolution2d(
Derek Lamberti901ea112019-12-10 22:07:09 +00002300 const TransposeConvolution2dQueueDescriptor& /*descriptor*/,
2301 const WorkloadInfo& /*info*/) const
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01002302{
2303 return std::unique_ptr<IWorkload>();
surmeh013537c2c2018-05-18 16:31:43 +01002304}
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01002305
Narumol Prangnawarat8ed39ae2021-07-15 16:16:25 +01002306std::unique_ptr<IWorkload> IWorkloadFactory::CreateUnidirectionalSequenceLstm(
2307 const UnidirectionalSequenceLstmQueueDescriptor& /*descriptor*/,
2308 const WorkloadInfo& /*info*/) const
2309{
2310 return std::unique_ptr<IWorkload>();
2311}
2312
Francis Murtagh9270d9e2022-08-12 13:54:17 +01002313std::unique_ptr<IWorkload> IWorkloadFactory::CreateInput(
2314 const InputQueueDescriptor& /*descriptor*/,
2315 const WorkloadInfo& /*info*/) const
2316{
2317 return std::unique_ptr<IWorkload>();
2318}
2319
} // namespace armnn