blob: 51bc3e60cbbec8ab373f776f73645b2339385d08 [file] [log] [blame]
Laurent Carlier749294b2020-06-01 09:03:17 +01001//
Mike Kelly3ec30772023-03-08 13:47:17 +00002// Copyright © 2017-2023 Arm Ltd and Contributors. All rights reserved.
David Beckecb56cd2018-09-05 12:52:57 +01003// SPDX-License-Identifier: MIT
telsoa014fcda012018-03-09 14:13:49 +00004//
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +00005
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +00006#include <Layer.hpp>
7#include <LayersFwd.hpp>
David Beckdcb751f2018-10-03 11:42:42 +01008
David Beckb4540be2018-09-24 13:18:27 +01009#include <armnn/Types.hpp>
Sadik Armagana097d2a2021-11-24 15:47:28 +000010#include <armnn/backends/IBackendInternal.hpp>
Francis Murtaghcae45682021-04-26 10:07:49 +010011#include <armnn/backends/ILayerSupport.hpp>
Sadik Armaganf0a6dec2021-03-25 07:46:55 +000012#include <armnn/BackendHelper.hpp>
Matteo Martincighc601aa62019-10-29 15:03:22 +000013#include <armnn/BackendRegistry.hpp>
Jan Eilersbb446e52020-04-02 13:56:54 +010014#include <armnn/utility/PolymorphicDowncast.hpp>
Finn Williams3e54d032020-10-22 16:53:35 +010015#include <armnn/utility/TransformIterator.hpp>
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +000016
Colm Donelan0c479742021-12-10 12:43:54 +000017#include <armnn/backends/WorkloadFactory.hpp>
telsoa014fcda012018-03-09 14:13:49 +000018
David Beck111b5d92018-11-12 14:59:37 +000019#include <sstream>
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +000020
telsoa014fcda012018-03-09 14:13:49 +000021namespace armnn
22{
23
telsoa01c577f2c2018-08-31 09:22:23 +010024namespace
25{
Finn Williams3e54d032020-10-22 16:53:35 +010026using LayerList = std::list<Layer*>;
27using Iterator = LayerList::const_iterator; // Const so pointers in the list can't be modified externally.
telsoa01c577f2c2018-08-31 09:22:23 +010028
David Beck29c75de2018-10-23 13:35:58 +010029const TensorInfo OverrideDataType(const TensorInfo& info, Optional<DataType> type)
30{
31 if (!type)
32 {
33 return info;
telsoa01c577f2c2018-08-31 09:22:23 +010034 }
35
Matthew Sloyan81beae32021-07-13 19:46:11 +010036 return TensorInfo(info.GetShape(),
37 type.value(),
38 info.GetQuantizationScale(),
39 info.GetQuantizationOffset(),
40 info.IsConstant());
telsoa01c577f2c2018-08-31 09:22:23 +010041}
42
David Beck29c75de2018-10-23 13:35:58 +010043} // anonymous namespace
44
Sadik Armagana097d2a2021-11-24 15:47:28 +000045inline armnn::Optional<armnn::DataType> GetBiasTypeFromWeightsType(armnn::Optional<armnn::DataType> weightsType)
46{
47 if (!weightsType)
48 {
49 return weightsType;
50 }
51
52 switch(weightsType.value())
53 {
54 case armnn::DataType::BFloat16:
55 case armnn::DataType::Float16:
56 case armnn::DataType::Float32:
57 return weightsType;
58 case armnn::DataType::QAsymmS8:
59 case armnn::DataType::QAsymmU8:
60 case armnn::DataType::QSymmS8:
61 case armnn::DataType::QSymmS16:
62 return armnn::DataType::Signed32;
63 default:
64 ARMNN_ASSERT_MSG(false, "GetBiasTypeFromWeightsType(): Unsupported data type.");
65 }
66 return armnn::EmptyOptional();
67}
68
69
Sadik Armagan045f6be2020-09-10 13:37:32 +010070bool IWorkloadFactory::IsLayerConfigurationSupported(const BackendId& backendId,
71 const IConnectableLayer& connectableLayer,
72 Optional<DataType> dataType,
73 std::string& outReasonIfUnsupported,
74 const ModelOptions& modelOptions)
telsoa014fcda012018-03-09 14:13:49 +000075{
David Beck33f0ae02018-10-18 15:13:56 +010076 Optional<std::string&> reason = outReasonIfUnsupported;
telsoa014fcda012018-03-09 14:13:49 +000077 bool result;
Jan Eilersbb446e52020-04-02 13:56:54 +010078 const Layer& layer = *(PolymorphicDowncast<const Layer*>(&connectableLayer));
David Beckdcb751f2018-10-03 11:42:42 +010079
David Beck111b5d92018-11-12 14:59:37 +000080 auto const& backendRegistry = BackendRegistryInstance();
81 if (!backendRegistry.IsBackendRegistered(backendId))
82 {
83 std::stringstream ss;
84 ss << connectableLayer.GetName() << " is not supported on " << backendId
85 << " because this backend is not registered.";
86
87 outReasonIfUnsupported = ss.str();
88 return false;
89 }
90
91 auto backendFactory = backendRegistry.GetFactory(backendId);
92 auto backendObject = backendFactory();
Mike Kelly3ec30772023-03-08 13:47:17 +000093 auto layerSupport = backendObject->GetLayerSupport(modelOptions);
94 auto layerSupportObject = LayerSupportHandle(layerSupport, backendId);
David Beck33f0ae02018-10-18 15:13:56 +010095
telsoa014fcda012018-03-09 14:13:49 +000096 switch(layer.GetType())
97 {
98 case LayerType::Activation:
99 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100100 auto cLayer = PolymorphicDowncast<const ActivationLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000101 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100102 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000103 result = layerSupportObject.IsActivationSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100104 OverrideDataType(input, dataType),
105 OverrideDataType(output, dataType),
106 cLayer->GetParameters(),
David Beck33f0ae02018-10-18 15:13:56 +0100107 reason);
telsoa014fcda012018-03-09 14:13:49 +0000108 break;
109 }
110 case LayerType::Addition:
111 {
Mike Kelly3ec30772023-03-08 13:47:17 +0000112 ARMNN_NO_DEPRECATE_WARN_BEGIN
telsoa014fcda012018-03-09 14:13:49 +0000113 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
114 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
115 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000116 result = layerSupportObject.IsAdditionSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100117 OverrideDataType(input0, dataType),
118 OverrideDataType(input1, dataType),
119 OverrideDataType(output, dataType),
David Beck33f0ae02018-10-18 15:13:56 +0100120 reason);
Mike Kelly3ec30772023-03-08 13:47:17 +0000121 ARMNN_NO_DEPRECATE_WARN_END
telsoa014fcda012018-03-09 14:13:49 +0000122 break;
123 }
Nikhil Rajee391d52019-09-05 17:50:44 +0100124 case LayerType::ArgMinMax:
125 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100126 auto cLayer = PolymorphicDowncast<const ArgMinMaxLayer*>(&layer);
Nikhil Rajee391d52019-09-05 17:50:44 +0100127 const ArgMinMaxDescriptor& descriptor = cLayer->GetParameters();
128
129 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
130 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000131 result = layerSupportObject.IsArgMinMaxSupported(
Nikhil Rajee391d52019-09-05 17:50:44 +0100132 OverrideDataType(input, dataType),
Narumol Prangnawaratd1f57732019-10-31 14:24:02 +0000133 OverrideDataType(output, DataType::Signed32),
Nikhil Rajee391d52019-09-05 17:50:44 +0100134 descriptor,
135 reason);
136 break;
137 }
Samuel Yap6b478092022-07-06 15:36:03 +0100138 case LayerType::BatchMatMul:
139 {
140 auto cLayer = PolymorphicDowncast<const BatchMatMulLayer*>(&layer);
141 const BatchMatMulDescriptor& descriptor = cLayer->GetParameters();
142
143 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
144 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
145 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
146 result = layerSupportObject.IsBatchMatMulSupported(
147 OverrideDataType(input0, dataType),
148 OverrideDataType(input1, dataType),
149 OverrideDataType(output, dataType),
150 descriptor,
151 reason);
152 break;
153 }
telsoa014fcda012018-03-09 14:13:49 +0000154 case LayerType::BatchNormalization:
155 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100156 auto cLayer = PolymorphicDowncast<const BatchNormalizationLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000157 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100158 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
159 const TensorInfo& mean = cLayer->m_Mean->GetTensorInfo();
160 const TensorInfo& var = cLayer->m_Variance->GetTensorInfo();
161 const TensorInfo& beta = cLayer->m_Beta->GetTensorInfo();
162 const TensorInfo& gamma = cLayer->m_Gamma->GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000163 result = layerSupportObject.IsBatchNormalizationSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100164 OverrideDataType(input, dataType),
165 OverrideDataType(output, dataType),
166 OverrideDataType(mean, dataType),
167 OverrideDataType(var, dataType),
168 OverrideDataType(beta, dataType),
169 OverrideDataType(gamma, dataType),
170 cLayer->GetParameters(),
David Beck33f0ae02018-10-18 15:13:56 +0100171 reason);
telsoa014fcda012018-03-09 14:13:49 +0000172 break;
173 }
Éanna Ó Catháin4e1e1362018-11-12 11:36:34 +0000174 case LayerType::BatchToSpaceNd:
175 {
176 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
177 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Jan Eilersbb446e52020-04-02 13:56:54 +0100178 auto cLayer = PolymorphicDowncast<const BatchToSpaceNdLayer*>(&layer);
Éanna Ó Catháin4e1e1362018-11-12 11:36:34 +0000179
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000180 result = layerSupportObject.IsBatchToSpaceNdSupported(OverrideDataType(input, dataType),
181 OverrideDataType(output, dataType),
182 cLayer->GetParameters(),
183 reason);
Éanna Ó Catháin4e1e1362018-11-12 11:36:34 +0000184 break;
185 }
mathad01b392e982021-04-07 12:07:30 +0100186 case LayerType::Cast:
187 {
188 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
189 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
190
191 result = layerSupportObject.IsCastSupported(OverrideDataType(input, dataType),
192 OverrideDataType(output, dataType),
193 reason);
194 break;
195 }
Simon Obute51f67772021-09-03 15:50:13 +0100196 case LayerType::ChannelShuffle:
197 {
198 auto cLayer = PolymorphicDowncast<const ChannelShuffleLayer*>(&layer);
199
200 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
201 const TensorInfo& output = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
202
203 const ChannelShuffleDescriptor descriptor = cLayer->GetParameters();
204
205 result = layerSupportObject.IsChannelShuffleSupported(OverrideDataType(input, dataType),
206 OverrideDataType(output, dataType),
207 descriptor,
208 reason);
209 break;
210 }
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +0100211 case LayerType::Comparison:
212 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100213 auto cLayer = PolymorphicDowncast<const ComparisonLayer*>(&layer);
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +0100214
215 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
216 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
217 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
218
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000219 result = layerSupportObject.IsComparisonSupported(OverrideDataType(input0, dataType),
220 OverrideDataType(input1, dataType),
221 OverrideDataType(output, DataType::Boolean),
222 cLayer->GetParameters(),
223 reason);
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +0100224 break;
225 }
telsoa014fcda012018-03-09 14:13:49 +0000226 case LayerType::Constant:
227 {
228 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000229 result = layerSupportObject.IsConstantSupported(OverrideDataType(output, dataType), reason);
telsoa01c577f2c2018-08-31 09:22:23 +0100230 break;
231 }
232 case LayerType::ConvertFp16ToFp32:
233 {
234 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
235 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000236 result = layerSupportObject.IsConvertFp16ToFp32Supported(input, output, reason);
telsoa01c577f2c2018-08-31 09:22:23 +0100237 break;
238 }
239 case LayerType::ConvertFp32ToFp16:
240 {
241 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
242 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000243 result = layerSupportObject.IsConvertFp32ToFp16Supported(input, output, reason);
telsoa014fcda012018-03-09 14:13:49 +0000244 break;
245 }
246 case LayerType::Convolution2d:
247 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100248 auto cLayer = PolymorphicDowncast<const Convolution2dLayer*>(&layer);
arovir01a6824102018-08-28 17:40:45 +0100249
250 const TensorInfo input = OverrideDataType(layer.GetInputSlot(0).GetConnection()->GetTensorInfo(),
251 dataType);
telsoa01c577f2c2018-08-31 09:22:23 +0100252 const TensorInfo output = OverrideDataType(layer.GetOutputSlot(0).GetTensorInfo(), dataType);
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100253 ARMNN_ASSERT_MSG(layer.GetInputSlot(1).GetConnection(),
254 "Convolution2dLayer: Weights should be connected as a Constant Layer.");
255 const TensorInfo weights = OverrideDataType(layer.GetInputSlot(1).GetConnection()->GetTensorInfo(),
256 dataType);
surmeh013537c2c2018-05-18 16:31:43 +0100257
Matthew Sloyanb63a3112021-09-08 13:05:51 +0100258 const Convolution2dDescriptor& descriptor = cLayer->GetParameters();
surmeh013537c2c2018-05-18 16:31:43 +0100259
arovir01a6824102018-08-28 17:40:45 +0100260 // Construct optional biases object based on the value of m_BiasEnabled
David Beck5eec11d2018-10-04 15:43:17 +0100261 Optional<TensorInfo> biases;
surmeh013537c2c2018-05-18 16:31:43 +0100262 if (descriptor.m_BiasEnabled)
263 {
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100264 ARMNN_ASSERT_MSG(layer.GetInputSlot(2).GetConnection(),
265 "Convolution2dLayer: Bias should be connected as a Constant Layer.");
266 biases = OverrideDataType(layer.GetInputSlot(2).GetConnection()->GetTensorInfo(),
267 GetBiasTypeFromWeightsType(dataType));
surmeh013537c2c2018-05-18 16:31:43 +0100268 }
269
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000270 result = layerSupportObject.IsConvolution2dSupported(
surmeh013537c2c2018-05-18 16:31:43 +0100271 input,
272 output,
273 descriptor,
Keith Davisb4dd5cc2022-04-07 11:32:00 +0100274 weights,
arovir01a6824102018-08-28 17:40:45 +0100275 biases,
David Beck33f0ae02018-10-18 15:13:56 +0100276 reason);
telsoa014fcda012018-03-09 14:13:49 +0000277 break;
278 }
Matthew Sloyanb63a3112021-09-08 13:05:51 +0100279 case LayerType::Convolution3d:
280 {
281 auto cLayer = PolymorphicDowncast<const Convolution3dLayer*>(&layer);
282
283 const TensorInfo input = OverrideDataType(layer.GetInputSlot(0).GetConnection()->GetTensorInfo(),
284 dataType);
285 const TensorInfo output = OverrideDataType(layer.GetOutputSlot(0).GetTensorInfo(), dataType);
Matthew Sloyan5d7b0a32021-10-18 13:07:49 +0100286
287 ARMNN_ASSERT_MSG(layer.GetInputSlot(1).GetConnection(),
288 "Convolution3dLayer: Weights should be connected as a Constant Layer.");
289 const TensorInfo weights = OverrideDataType(layer.GetInputSlot(1).GetConnection()->GetTensorInfo(),
290 dataType);
Matthew Sloyanb63a3112021-09-08 13:05:51 +0100291
292 const Convolution3dDescriptor& descriptor = cLayer->GetParameters();
293
294 // Construct optional biases object based on the value of m_BiasEnabled
295 Optional<TensorInfo> biases;
296 if (descriptor.m_BiasEnabled)
297 {
Matthew Sloyan5d7b0a32021-10-18 13:07:49 +0100298 biases = OverrideDataType(layer.GetInputSlot(2).GetConnection()->GetTensorInfo(),
299 GetBiasTypeFromWeightsType(dataType));
Matthew Sloyanb63a3112021-09-08 13:05:51 +0100300 }
301
302 result = layerSupportObject.IsConvolution3dSupported(
303 input,
304 output,
305 descriptor,
Matthew Sloyan5d7b0a32021-10-18 13:07:49 +0100306 weights,
Matthew Sloyanb63a3112021-09-08 13:05:51 +0100307 biases,
308 reason);
309 break;
310 }
Nattapat Chaimanowonga9a1cf12018-12-03 16:06:49 +0000311 case LayerType::Debug:
312 {
313 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
314 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
315
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000316 result = layerSupportObject.IsDebugSupported(OverrideDataType(input, dataType),
Nattapat Chaimanowonga9a1cf12018-12-03 16:06:49 +0000317 OverrideDataType(output, dataType),
318 reason);
319 break;
320 }
Aron Virginas-Tardd6247f2019-09-19 14:31:17 +0100321 case LayerType::DepthToSpace:
322 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100323 auto cLayer = PolymorphicDowncast<const DepthToSpaceLayer*>(&layer);
Aron Virginas-Tardd6247f2019-09-19 14:31:17 +0100324
325 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
326 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
327
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000328 result = layerSupportObject.IsDepthToSpaceSupported(OverrideDataType(input, dataType),
Aron Virginas-Tardd6247f2019-09-19 14:31:17 +0100329 OverrideDataType(output, dataType),
330 cLayer->GetParameters(),
331 reason);
332 break;
333 }
telsoa014fcda012018-03-09 14:13:49 +0000334 case LayerType::DepthwiseConvolution2d:
335 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100336 auto cLayer = PolymorphicDowncast<const DepthwiseConvolution2dLayer*>(&layer);
Cathal Corbett06902652022-04-14 17:55:11 +0100337 const TensorInfo& input = OverrideDataType(layer.GetInputSlot(0).GetConnection()->GetTensorInfo(),
338 dataType);
339 const TensorInfo& output = OverrideDataType(layer.GetOutputSlot(0).GetTensorInfo(), dataType);
340 const TensorInfo& weights = OverrideDataType(layer.GetInputSlot(1).GetConnection()->GetTensorInfo(),
341 dataType);
342
343 ARMNN_ASSERT(cLayer->GetInputSlot(1).GetConnection() != nullptr);
telsoa01c577f2c2018-08-31 09:22:23 +0100344
telsoa01c577f2c2018-08-31 09:22:23 +0100345 const DepthwiseConvolution2dDescriptor& descriptor = cLayer->GetParameters();
arovir01a6824102018-08-28 17:40:45 +0100346
347 // Construct optional biases object based on the value of m_BiasEnabled
David Beck5eec11d2018-10-04 15:43:17 +0100348 Optional<TensorInfo> biases;
telsoa01c577f2c2018-08-31 09:22:23 +0100349 if (descriptor.m_BiasEnabled)
350 {
Cathal Corbett06902652022-04-14 17:55:11 +0100351 biases = OverrideDataType(cLayer->GetInputSlot(2).GetConnection()->GetTensorInfo(),
352 GetBiasTypeFromWeightsType(dataType));
telsoa01c577f2c2018-08-31 09:22:23 +0100353 }
telsoa01c577f2c2018-08-31 09:22:23 +0100354
Cathal Corbett06902652022-04-14 17:55:11 +0100355 result = layerSupportObject.IsDepthwiseConvolutionSupported(input,
356 output,
357 descriptor,
358 weights,
359 biases,
360 reason);
telsoa014fcda012018-03-09 14:13:49 +0000361 break;
362 }
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +0000363 case LayerType::Dequantize:
364 {
365 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
366 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
367
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000368 result = layerSupportObject.IsDequantizeSupported(input,
369 OverrideDataType(output, dataType),
370 reason);
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +0000371 break;
372 }
Narumol Prangnawarat94dd5d82019-01-23 18:06:26 +0000373 case LayerType::DetectionPostProcess:
374 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100375 auto cLayer = PolymorphicDowncast<const DetectionPostProcessLayer*>(&layer);
Derek Lamberti6a5e5e82019-12-05 14:41:20 +0000376 const TensorInfo& boxEncodings = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
377 const TensorInfo& scores = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
378 const TensorInfo& anchors = cLayer->m_Anchors->GetTensorInfo();
379
380 const TensorInfo& detectionBoxes = layer.GetOutputSlot(0).GetTensorInfo();
381 const TensorInfo& detectionClasses = layer.GetOutputSlot(1).GetTensorInfo();
382 const TensorInfo& detectionScores = layer.GetOutputSlot(2).GetTensorInfo();
383 const TensorInfo& numDetections = layer.GetOutputSlot(3).GetTensorInfo();
384
Narumol Prangnawarat94dd5d82019-01-23 18:06:26 +0000385 const DetectionPostProcessDescriptor& descriptor = cLayer->GetParameters();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000386 result = layerSupportObject.IsDetectionPostProcessSupported(boxEncodings,
387 scores,
388 anchors,
389 detectionBoxes,
390 detectionClasses,
391 detectionScores,
392 numDetections,
393 descriptor,
394 reason);
Narumol Prangnawarat94dd5d82019-01-23 18:06:26 +0000395 break;
396 }
Mike Kelly3ec30772023-03-08 13:47:17 +0000397 case LayerType::ElementwiseBinary:
398 {
399 auto cLayer = PolymorphicDowncast<const ElementwiseBinaryLayer*>(&layer);
400
401 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
402 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
403 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
404 std::vector<TensorInfo> infos = { OverrideDataType(input0, dataType),
405 OverrideDataType(input1, dataType),
406 OverrideDataType(output, dataType) };
407 result = layerSupport->IsLayerSupported(LayerType::ElementwiseBinary,
408 infos,
409 cLayer->GetParameters(),
410 EmptyOptional(),
411 EmptyOptional(),
412 reason);
413 break;
414 }
josh minor4a3c6102020-01-06 16:40:46 -0600415 case LayerType::ElementwiseUnary:
416 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100417 auto cLayer = PolymorphicDowncast<const ElementwiseUnaryLayer*>(&layer);
josh minor4a3c6102020-01-06 16:40:46 -0600418
419 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
420 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
421
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000422 result = layerSupportObject.IsElementwiseUnarySupported(OverrideDataType(input, dataType),
423 OverrideDataType(output, dataType),
424 cLayer->GetParameters(),
425 reason);
josh minor4a3c6102020-01-06 16:40:46 -0600426 break;
427 }
Ryan OSheaec6c6802020-06-05 17:17:06 +0100428 case LayerType::Fill:
429 {
430 auto cLayer = PolymorphicDowncast<const FillLayer*>(&layer);
431 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
432 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
433 const FillDescriptor& descriptor = cLayer->GetParameters();
434
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000435 result = layerSupportObject.IsFillSupported(
Ryan OSheaec6c6802020-06-05 17:17:06 +0100436 OverrideDataType(input, dataType),
437 OverrideDataType(output, dataType),
438 descriptor,
439 reason);
440 break;
441 }
telsoa014fcda012018-03-09 14:13:49 +0000442 case LayerType::FakeQuantization:
443 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100444 auto cLayer = PolymorphicDowncast<const FakeQuantizationLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000445 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000446 result = layerSupportObject.IsFakeQuantizationSupported(OverrideDataType(input, dataType),
447 cLayer->GetParameters(),
448 reason);
telsoa014fcda012018-03-09 14:13:49 +0000449 break;
450 }
451 case LayerType::Floor:
452 {
453 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
454 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000455 result = layerSupportObject.IsFloorSupported(OverrideDataType(input, dataType),
456 OverrideDataType(output, dataType),
457 reason);
telsoa014fcda012018-03-09 14:13:49 +0000458 break;
459 }
460 case LayerType::FullyConnected:
461 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100462 auto cLayer = PolymorphicDowncast<const FullyConnectedLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000463 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100464 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000465
466 const FullyConnectedDescriptor& descriptor = cLayer->GetParameters();
467 TensorInfo weightsInfo;
468 const TensorInfo* weightsInfoPtr = nullptr;
469
Matthew Sloyan81beae32021-07-13 19:46:11 +0100470 weightsInfo = OverrideDataType(layer.GetInputSlot(1).GetConnection()->GetTensorInfo(), dataType);
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000471 weightsInfoPtr = &weightsInfo;
telsoa01c577f2c2018-08-31 09:22:23 +0100472
473 TensorInfo biasInfo;
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000474 const TensorInfo* biasInfoPtr = nullptr;
Narumol Prangnawarat44179c32020-03-11 14:51:27 +0000475 static const TensorInfo dummyBFloat16Bias(TensorShape({1,1,1,1}), DataType::BFloat16);
telsoa01c577f2c2018-08-31 09:22:23 +0100476 static const TensorInfo dummyFloat16Bias(TensorShape({1,1,1,1}), DataType::Float16);
477 static const TensorInfo dummyFloat32Bias(TensorShape({1,1,1,1}), DataType::Float32);
478 static const TensorInfo dummyQA8Bias(TensorShape({1,1,1,1}), DataType::Signed32);
479
telsoa01c577f2c2018-08-31 09:22:23 +0100480 if (descriptor.m_BiasEnabled)
481 {
Matthew Sloyan81beae32021-07-13 19:46:11 +0100482 biasInfo = OverrideDataType(layer.GetInputSlot(2).GetConnection()->GetTensorInfo(), dataType);
483 biasInfoPtr = &biasInfo;
telsoa01c577f2c2018-08-31 09:22:23 +0100484 }
485 else
486 {
487 // If biases are not enabled pass a dummy tensorinfo for the validation
488 switch(input.GetDataType())
489 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +0000490 case DataType::BFloat16:
491 {
492 biasInfoPtr = &dummyBFloat16Bias;
493 break;
494 }
telsoa01c577f2c2018-08-31 09:22:23 +0100495 case DataType::Float16:
496 {
497 biasInfoPtr = &dummyFloat16Bias;
498 break;
499 }
500 case DataType::Float32:
501 {
502 biasInfoPtr = &dummyFloat32Bias;
503 break;
504 }
Derek Lambertif90c56d2020-01-10 17:14:08 +0000505 case DataType::QAsymmU8:
Keith Davisa8565012020-02-14 12:22:40 +0000506 case DataType::QAsymmS8:
Keith Davis9d0ff742020-02-03 14:47:54 +0000507 case DataType::QSymmS8:
Derek Lambertif90c56d2020-01-10 17:14:08 +0000508 case DataType::QSymmS16:
telsoa01c577f2c2018-08-31 09:22:23 +0100509 {
510 biasInfoPtr = &dummyQA8Bias;
511 break;
512 }
513 default:
514 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +0100515 ARMNN_ASSERT_MSG(false, "Unexpected bias type");
telsoa01c577f2c2018-08-31 09:22:23 +0100516 }
517 }
518 }
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000519 result = layerSupportObject.IsFullyConnectedSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100520 OverrideDataType(input, dataType),
521 OverrideDataType(output, dataType),
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000522 *weightsInfoPtr,
telsoa01c577f2c2018-08-31 09:22:23 +0100523 *biasInfoPtr,
524 descriptor,
David Beck33f0ae02018-10-18 15:13:56 +0100525 reason);
telsoa014fcda012018-03-09 14:13:49 +0000526 break;
527 }
narpra01b89b05f2019-01-16 09:53:09 +0000528 case LayerType::Gather:
529 {
530 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
531 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
532 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Teresa Charlin52664732020-06-29 16:27:03 +0100533 auto cLayer = PolymorphicDowncast<const GatherLayer*>(&layer);
534 const GatherDescriptor& descriptor = cLayer->GetParameters();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000535 result = layerSupportObject.IsGatherSupported(OverrideDataType(input0, dataType),
536 input1,
537 OverrideDataType(output, dataType),
538 descriptor,
539 reason);
narpra01b89b05f2019-01-16 09:53:09 +0000540 break;
541 }
Teresa Charlinb2d3ec52022-04-12 22:07:09 +0100542 case LayerType::GatherNd:
543 {
544 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
545 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
546 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
547 result = layerSupportObject.IsGatherNdSupported(OverrideDataType(input0, dataType),
548 input1,
549 OverrideDataType(output, dataType),
550 reason);
551 break;
552 }
telsoa014fcda012018-03-09 14:13:49 +0000553 case LayerType::Input:
554 {
555 const TensorInfo& input = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000556 result = layerSupportObject.IsInputSupported(OverrideDataType(input, dataType), reason);
telsoa014fcda012018-03-09 14:13:49 +0000557 break;
558 }
Kevin Mayce5045a2019-10-02 14:07:47 +0100559 case LayerType::InstanceNormalization:
560 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100561 auto cLayer = PolymorphicDowncast<const InstanceNormalizationLayer*>(&layer);
Kevin Mayce5045a2019-10-02 14:07:47 +0100562 const InstanceNormalizationDescriptor& descriptor = cLayer->GetParameters();
563
564 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
565 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
566
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000567 result = layerSupportObject.IsInstanceNormalizationSupported(
Kevin Mayce5045a2019-10-02 14:07:47 +0100568 OverrideDataType(input, dataType),
569 OverrideDataType(output, dataType),
570 descriptor,
571 reason);
572 break;
573 }
telsoa014fcda012018-03-09 14:13:49 +0000574 case LayerType::L2Normalization:
575 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100576 auto cLayer = PolymorphicDowncast<const L2NormalizationLayer*>(&layer);
Matteo Martincighbcd3c852018-09-28 14:14:12 +0100577 const L2NormalizationDescriptor& descriptor = cLayer->GetParameters();
578
telsoa014fcda012018-03-09 14:13:49 +0000579 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100580 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Matteo Martincighbcd3c852018-09-28 14:14:12 +0100581
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000582 result = layerSupportObject.IsL2NormalizationSupported(
Matteo Martincighbcd3c852018-09-28 14:14:12 +0100583 OverrideDataType(input, dataType),
584 OverrideDataType(output, dataType),
585 descriptor,
David Beck33f0ae02018-10-18 15:13:56 +0100586 reason);
telsoa01c577f2c2018-08-31 09:22:23 +0100587 break;
588 }
James Conroyaba90cd2020-11-06 16:28:18 +0000589 case LayerType::LogicalBinary:
590 {
591 auto cLayer = PolymorphicDowncast<const LogicalBinaryLayer*>(&layer);
592
593 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
594 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
595 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
596
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000597 result = layerSupportObject.IsLogicalBinarySupported(input0,
598 input1,
599 output,
600 cLayer->GetParameters(),
601 reason);
James Conroyaba90cd2020-11-06 16:28:18 +0000602 break;
603 }
Aron Virginas-Tarf982dea2019-10-11 14:07:53 +0100604 case LayerType::LogSoftmax:
605 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100606 auto cLayer = PolymorphicDowncast<const LogSoftmaxLayer*>(&layer);
Aron Virginas-Tarf982dea2019-10-11 14:07:53 +0100607
608 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
609 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
610
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000611 result = layerSupportObject.IsLogSoftmaxSupported(OverrideDataType(input, dataType),
612 OverrideDataType(output, dataType),
613 cLayer->GetParameters(),
614 reason);
Aron Virginas-Tarf982dea2019-10-11 14:07:53 +0100615 break;
616 }
telsoa01c577f2c2018-08-31 09:22:23 +0100617 case LayerType::Lstm:
618 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100619 auto cLayer = PolymorphicDowncast<const LstmLayer*>(&layer);
telsoa01c577f2c2018-08-31 09:22:23 +0100620 const LstmDescriptor& descriptor = cLayer->GetParameters();
621
622 // All inputs.
623 const TensorInfo& input = OverrideDataType(layer.GetInputSlot(0).GetConnection()->GetTensorInfo(),
624 dataType);
625 const TensorInfo& outputStateIn = OverrideDataType(layer.GetInputSlot(1).GetConnection()->GetTensorInfo(),
626 dataType);
627 const TensorInfo& cellStateIn = OverrideDataType(layer.GetInputSlot(2).GetConnection()->GetTensorInfo(),
628 dataType);
629 // All outputs
630 const TensorInfo& scratchBuffer = OverrideDataType(layer.GetOutputSlot(0).GetTensorInfo(), dataType);
631 const TensorInfo& outputStateOut = OverrideDataType(layer.GetOutputSlot(1).GetTensorInfo(), dataType);
632 const TensorInfo& cellStateOut = OverrideDataType(layer.GetOutputSlot(2).GetTensorInfo(), dataType);
633 const TensorInfo& output = OverrideDataType(layer.GetOutputSlot(3).GetTensorInfo(), dataType);
634
635 // Basic parameters
636 const TensorInfo& inputToForgetWeights
637 = OverrideDataType(cLayer->m_BasicParameters.m_InputToForgetWeights->GetTensorInfo(), dataType);
638 const TensorInfo& inputToCellWeights
639 = OverrideDataType(cLayer->m_BasicParameters.m_InputToCellWeights->GetTensorInfo(), dataType);
640 const TensorInfo& inputToOutputWeights
641 = OverrideDataType(cLayer->m_BasicParameters.m_InputToOutputWeights->GetTensorInfo(), dataType);
642 const TensorInfo& recurrentToForgetWeights
643 = OverrideDataType(cLayer->m_BasicParameters.m_RecurrentToForgetWeights->GetTensorInfo(), dataType);
644 const TensorInfo& recurrentToCellWeights
645 = OverrideDataType(cLayer->m_BasicParameters.m_RecurrentToCellWeights->GetTensorInfo(), dataType);
646 const TensorInfo& recurrentToOutputWeights
647 = OverrideDataType(cLayer->m_BasicParameters.m_RecurrentToOutputWeights->GetTensorInfo(), dataType);
648 const TensorInfo& forgetGateBias
649 = OverrideDataType(cLayer->m_BasicParameters.m_ForgetGateBias->GetTensorInfo(), dataType);
650 const TensorInfo& cellBias
651 = OverrideDataType(cLayer->m_BasicParameters.m_CellBias->GetTensorInfo(), dataType);
652 const TensorInfo& outputGateBias
653 = OverrideDataType(cLayer->m_BasicParameters.m_OutputGateBias->GetTensorInfo(), dataType);
654
Jan Eilersd01a83c2019-07-03 18:20:40 +0100655 LstmInputParamsInfo paramsInfo;
telsoa01c577f2c2018-08-31 09:22:23 +0100656
Jan Eilersd01a83c2019-07-03 18:20:40 +0100657 paramsInfo.m_InputToForgetWeights = &inputToForgetWeights;
658 paramsInfo.m_InputToCellWeights = &inputToCellWeights;
659 paramsInfo.m_InputToOutputWeights = &inputToOutputWeights;
660 paramsInfo.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
661 paramsInfo.m_RecurrentToCellWeights = &recurrentToCellWeights;
662 paramsInfo.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
663 paramsInfo.m_ForgetGateBias = &forgetGateBias;
664 paramsInfo.m_CellBias = &cellBias;
665 paramsInfo.m_OutputGateBias = &outputGateBias;
666
667
668 // Optional parameters
telsoa01c577f2c2018-08-31 09:22:23 +0100669 TensorInfo optInputToInputWeights;
670 TensorInfo optRecurrentToInputWeights;
671 TensorInfo optCellToInputWeights;
672 TensorInfo optInputGateBias;
673 TensorInfo optProjectionWeights;
674 TensorInfo optProjectionBias;
675 TensorInfo optCellToForgetWeights;
676 TensorInfo optCellToOutputWeights;
Jan Eilers38e05bd2019-06-26 13:10:09 +0100677 TensorInfo optInputLayerNormWeights;
678 TensorInfo optForgetLayerNormWeights;
679 TensorInfo optCellLayerNormWeights;
680 TensorInfo optOutputLayerNormWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100681
682 if(!descriptor.m_CifgEnabled)
683 {
684 optInputToInputWeights =
685 OverrideDataType(cLayer->m_CifgParameters.m_InputToInputWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100686 paramsInfo.m_InputToInputWeights = &optInputToInputWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100687
688 optRecurrentToInputWeights =
689 OverrideDataType(cLayer->m_CifgParameters.m_RecurrentToInputWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100690 paramsInfo.m_RecurrentToInputWeights = &optRecurrentToInputWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100691 optInputGateBias =
692 OverrideDataType(cLayer->m_CifgParameters.m_InputGateBias->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100693 paramsInfo.m_InputGateBias = &optInputGateBias;
telsoa01c577f2c2018-08-31 09:22:23 +0100694 }
695
696 if(descriptor.m_ProjectionEnabled)
697 {
698 optProjectionWeights =
699 OverrideDataType(cLayer->m_ProjectionParameters.m_ProjectionWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100700 paramsInfo.m_ProjectionWeights = &optProjectionWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100701 if (cLayer->m_ProjectionParameters.m_ProjectionBias != nullptr)
702 {
703 optProjectionBias =
704 OverrideDataType(cLayer->m_ProjectionParameters.m_ProjectionBias->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100705 paramsInfo.m_ProjectionBias = &optProjectionBias;
telsoa01c577f2c2018-08-31 09:22:23 +0100706 }
707 }
708
709 if(descriptor.m_PeepholeEnabled)
710 {
Jan Eilerse2062cd2020-03-30 15:07:45 +0100711 if(!descriptor.m_CifgEnabled)
712 {
713 optCellToInputWeights =
714 OverrideDataType(cLayer->m_PeepholeParameters.m_CellToInputWeights->GetTensorInfo(),
715 dataType);
716 paramsInfo.m_CellToInputWeights = &optCellToInputWeights;
717 }
telsoa01c577f2c2018-08-31 09:22:23 +0100718 optCellToForgetWeights =
719 OverrideDataType(cLayer->m_PeepholeParameters.m_CellToForgetWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100720 paramsInfo.m_CellToForgetWeights = &optCellToForgetWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100721 optCellToOutputWeights =
722 OverrideDataType(cLayer->m_PeepholeParameters.m_CellToOutputWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100723 paramsInfo.m_CellToOutputWeights = &optCellToOutputWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100724 }
725
Jan Eilers38e05bd2019-06-26 13:10:09 +0100726 if(descriptor.m_LayerNormEnabled)
727 {
Ferran Balaguere30c16e2019-07-24 17:03:45 +0100728 if (!descriptor.m_CifgEnabled)
729 {
730 optInputLayerNormWeights = OverrideDataType(
731 cLayer->m_LayerNormParameters.m_InputLayerNormWeights->GetTensorInfo(), dataType);
732 paramsInfo.m_InputLayerNormWeights = &optInputLayerNormWeights;
733 }
Jan Eilers38e05bd2019-06-26 13:10:09 +0100734
735 optForgetLayerNormWeights = OverrideDataType(
736 cLayer->m_LayerNormParameters.m_ForgetLayerNormWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100737 paramsInfo.m_ForgetLayerNormWeights = &optForgetLayerNormWeights;
Jan Eilers38e05bd2019-06-26 13:10:09 +0100738
739 optCellLayerNormWeights = OverrideDataType(
740 cLayer->m_LayerNormParameters.m_CellLayerNormWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100741 paramsInfo.m_CellLayerNormWeights = &optCellLayerNormWeights;
Jan Eilers38e05bd2019-06-26 13:10:09 +0100742
743 optOutputLayerNormWeights = OverrideDataType(
744 cLayer->m_LayerNormParameters.m_OutputLayerNormWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100745 paramsInfo.m_OutputLayerNormWeights = &optOutputLayerNormWeights;
Jan Eilers38e05bd2019-06-26 13:10:09 +0100746 }
747
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000748 result = layerSupportObject.IsLstmSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100749 input,
750 outputStateIn,
751 cellStateIn,
752 scratchBuffer,
753 outputStateOut,
754 cellStateOut,
755 output,
756 descriptor,
Jan Eilersd01a83c2019-07-03 18:20:40 +0100757 paramsInfo,
758 reason);
telsoa014fcda012018-03-09 14:13:49 +0000759 break;
760 }
Nattapat Chaimanowong5a4304a2018-11-28 10:44:37 +0000761 case LayerType::Maximum:
762 {
Mike Kelly3ec30772023-03-08 13:47:17 +0000763 ARMNN_NO_DEPRECATE_WARN_BEGIN
Nattapat Chaimanowong5a4304a2018-11-28 10:44:37 +0000764 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
765 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
766 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
767
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000768 result = layerSupportObject.IsMaximumSupported(OverrideDataType(input0, dataType),
769 OverrideDataType(input1, dataType),
770 OverrideDataType(output, dataType),
771 reason);
Mike Kelly3ec30772023-03-08 13:47:17 +0000772 ARMNN_NO_DEPRECATE_WARN_END
Nattapat Chaimanowong5a4304a2018-11-28 10:44:37 +0000773 break;
774 }
narpra01b89b05f2019-01-16 09:53:09 +0000775 case LayerType::MemCopy:
776 {
777 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
778 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Matteo Martincigh992d6dc2019-01-10 17:34:20 +0000779
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000780 result = layerSupportObject.IsMemCopySupported(OverrideDataType(input, dataType),
781 OverrideDataType(output, dataType),
782 reason);
narpra01b89b05f2019-01-16 09:53:09 +0000783 break;
784 }
Derek Lambertif674aa02019-08-01 15:56:25 +0100785 case LayerType::MemImport:
786 {
787 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
788 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
789
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000790 result = layerSupportObject.IsMemImportSupported(OverrideDataType(input, dataType),
791 OverrideDataType(output, dataType),
792 reason);
Derek Lambertif674aa02019-08-01 15:56:25 +0100793 break;
794 }
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +0100795 case LayerType::Merge:
796 {
797 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
798 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
799 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
800
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000801 result = layerSupportObject.IsMergeSupported(OverrideDataType(input0, dataType),
802 OverrideDataType(input1, dataType),
803 OverrideDataType(output, dataType),
804 reason);
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +0100805 break;
806 }
Jim Flynne242f2d2019-05-22 14:24:13 +0100807 case LayerType::Concat:
telsoa014fcda012018-03-09 14:13:49 +0000808 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100809 auto cLayer = PolymorphicDowncast<const ConcatLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000810
telsoa01c577f2c2018-08-31 09:22:23 +0100811 // Get vector of all inputs.
812 auto getTensorInfo = [&dataType](const InputSlot& slot)
telsoa014fcda012018-03-09 14:13:49 +0000813 {
telsoa01c577f2c2018-08-31 09:22:23 +0100814 return OverrideDataType(slot.GetConnectedOutputSlot()->GetTensorInfo(), dataType);
telsoa014fcda012018-03-09 14:13:49 +0000815 };
Finn Williams3e54d032020-10-22 16:53:35 +0100816
817 auto beginI = MakeTransformIterator(layer.GetInputSlots().begin(), getTensorInfo);
818 auto endI = MakeTransformIterator(layer.GetInputSlots().end(), getTensorInfo);
telsoa01c577f2c2018-08-31 09:22:23 +0100819 std::vector<TensorInfo> inputs(beginI, endI);
telsoa014fcda012018-03-09 14:13:49 +0000820
telsoa01c577f2c2018-08-31 09:22:23 +0100821 auto getTensorInfoPtr = [](const TensorInfo& info)
822 {
823 return &info;
824 };
Finn Williams3e54d032020-10-22 16:53:35 +0100825
826 auto beginPtr = MakeTransformIterator(inputs.begin(), getTensorInfoPtr);
827 auto endPtr = MakeTransformIterator(inputs.end(), getTensorInfoPtr);
telsoa01c577f2c2018-08-31 09:22:23 +0100828 std::vector<const TensorInfo*> inputPtrs(beginPtr, endPtr);
telsoa014fcda012018-03-09 14:13:49 +0000829
Nikhil Raj8599a412018-11-19 14:51:07 +0000830 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
831
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000832 result = layerSupportObject.IsConcatSupported(inputPtrs, output, cLayer->GetParameters(), reason);
Jim Flynne242f2d2019-05-22 14:24:13 +0100833
834
telsoa014fcda012018-03-09 14:13:49 +0000835 break;
836 }
837 case LayerType::Multiplication:
838 {
Mike Kelly3ec30772023-03-08 13:47:17 +0000839 ARMNN_NO_DEPRECATE_WARN_BEGIN
telsoa014fcda012018-03-09 14:13:49 +0000840 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
841 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100842 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000843 result = layerSupportObject.IsMultiplicationSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100844 OverrideDataType(input0, dataType),
845 OverrideDataType(input1, dataType),
846 OverrideDataType(output, dataType),
David Beck33f0ae02018-10-18 15:13:56 +0100847 reason);
Mike Kelly3ec30772023-03-08 13:47:17 +0000848 ARMNN_NO_DEPRECATE_WARN_END
telsoa014fcda012018-03-09 14:13:49 +0000849 break;
850 }
851 case LayerType::Normalization:
852 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100853 auto cLayer = PolymorphicDowncast<const NormalizationLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000854 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
855 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000856 result = layerSupportObject.IsNormalizationSupported(OverrideDataType(input, dataType),
857 OverrideDataType(output, dataType),
858 cLayer->GetParameters(),
859 reason);
telsoa014fcda012018-03-09 14:13:49 +0000860 break;
861 }
862 case LayerType::Output:
863 {
864 const TensorInfo& output = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000865 result = layerSupportObject.IsOutputSupported(OverrideDataType(output, dataType), reason);
telsoa014fcda012018-03-09 14:13:49 +0000866 break;
867 }
868 case LayerType::Permute:
869 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100870 auto cLayer = PolymorphicDowncast<const PermuteLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000871 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
872 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000873 result = layerSupportObject.IsPermuteSupported(OverrideDataType(input, dataType),
874 OverrideDataType(output, dataType),
875 cLayer->GetParameters(),
876 reason);
telsoa014fcda012018-03-09 14:13:49 +0000877 break;
878 }
Mohamed Nour Abouelseoud5662c202018-09-24 13:30:09 +0100879 case LayerType::Pad:
880 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100881 auto cLayer = PolymorphicDowncast<const PadLayer*>(&layer);
Mohamed Nour Abouelseoud5662c202018-09-24 13:30:09 +0100882 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
883 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000884 result = layerSupportObject.IsPadSupported(
Mohamed Nour Abouelseoud5662c202018-09-24 13:30:09 +0100885 OverrideDataType(input, dataType),
886 OverrideDataType(output, dataType),
887 cLayer->GetParameters(),
David Beck33f0ae02018-10-18 15:13:56 +0100888 reason);
Mohamed Nour Abouelseoud5662c202018-09-24 13:30:09 +0100889 break;
890 }
telsoa014fcda012018-03-09 14:13:49 +0000891 case LayerType::Pooling2d:
892 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100893 auto cLayer = PolymorphicDowncast<const Pooling2dLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000894 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
895 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000896 result = layerSupportObject.IsPooling2dSupported(OverrideDataType(input, dataType),
897 OverrideDataType(output, dataType),
898 cLayer->GetParameters(),
899 reason);
telsoa014fcda012018-03-09 14:13:49 +0000900 break;
901 }
Tamás Nyíri7b885b32021-10-26 14:47:57 +0100902 case LayerType::Pooling3d:
903 {
904 auto cLayer = PolymorphicDowncast<const Pooling3dLayer*>(&layer);
905 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
906 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
907 result = layerSupportObject.IsPooling3dSupported(OverrideDataType(input, dataType),
908 OverrideDataType(output, dataType),
909 cLayer->GetParameters(),
910 reason);
911 break;
912 }
Matteo Martincigh49124022019-01-11 13:25:59 +0000913 case LayerType::PreCompiled:
914 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100915 auto cLayer = PolymorphicDowncast<const PreCompiledLayer*>(&layer);
Matteo Martincigh49124022019-01-11 13:25:59 +0000916 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000917 result = layerSupportObject.IsPreCompiledSupported(OverrideDataType(input, dataType),
918 cLayer->GetParameters(),
919 reason);
Matteo Martincigh49124022019-01-11 13:25:59 +0000920 break;
921 }
Derek Lambertia9cca6a2019-03-25 15:41:58 +0000922 case LayerType::Quantize:
923 {
924 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
925 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +0000926 result = layerSupportObject.IsQuantizeSupported(input, output, reason);
Derek Lambertia9cca6a2019-03-25 15:41:58 +0000927 break;
928 }
James Conroy586a9aa2020-03-20 08:49:33 +0000929 case LayerType::QLstm:
930 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100931 auto cLayer = PolymorphicDowncast<const QLstmLayer*>(&layer);
James Conroy586a9aa2020-03-20 08:49:33 +0000932 const QLstmDescriptor& descriptor = cLayer->GetParameters();
933
934 // Inputs
935 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
936 const TensorInfo& previousOutputIn = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
937 const TensorInfo& previousCellStateIn = layer.GetInputSlot(2).GetConnection()->GetTensorInfo();
938
939 // Outputs
940 const TensorInfo& outputStateOut = layer.GetOutputSlot(0).GetTensorInfo();
941 const TensorInfo& cellStateOut = layer.GetOutputSlot(1).GetTensorInfo();
942 const TensorInfo& output = layer.GetOutputSlot(2).GetTensorInfo();
943
944 // Lstm parameters
945 LstmInputParamsInfo paramsInfo;
946
947 // Basic parameters
Matthew Bentham6f24b1a2021-06-29 15:18:32 +0100948 ARMNN_ASSERT(cLayer->m_BasicParameters.m_InputToForgetWeights.get() != nullptr);
949 ARMNN_ASSERT(cLayer->m_BasicParameters.m_InputToCellWeights.get() != nullptr);
950 ARMNN_ASSERT(cLayer->m_BasicParameters.m_InputToOutputWeights.get() != nullptr);
James Conroy586a9aa2020-03-20 08:49:33 +0000951 paramsInfo.m_InputToForgetWeights = &cLayer->m_BasicParameters.m_InputToForgetWeights->GetTensorInfo();
952 paramsInfo.m_InputToCellWeights = &cLayer->m_BasicParameters.m_InputToCellWeights->GetTensorInfo();
953 paramsInfo.m_InputToOutputWeights = &cLayer->m_BasicParameters.m_InputToOutputWeights->GetTensorInfo();
954
955 paramsInfo.m_RecurrentToForgetWeights =
956 &cLayer->m_BasicParameters.m_RecurrentToForgetWeights->GetTensorInfo();
957 paramsInfo.m_RecurrentToCellWeights =
958 &cLayer->m_BasicParameters.m_RecurrentToCellWeights->GetTensorInfo();
959 paramsInfo.m_RecurrentToOutputWeights =
960 &cLayer->m_BasicParameters.m_RecurrentToOutputWeights->GetTensorInfo();
961
962 paramsInfo.m_ForgetGateBias = &cLayer->m_BasicParameters.m_ForgetGateBias->GetTensorInfo();
963 paramsInfo.m_CellBias = &cLayer->m_BasicParameters.m_CellBias->GetTensorInfo();
964 paramsInfo.m_OutputGateBias = &cLayer->m_BasicParameters.m_OutputGateBias->GetTensorInfo();
965
966 if(!descriptor.m_CifgEnabled)
967 {
968 paramsInfo.m_InputToInputWeights = &cLayer->m_CifgParameters.m_InputToInputWeights->GetTensorInfo();
969 paramsInfo.m_RecurrentToInputWeights =
970 &cLayer->m_CifgParameters.m_RecurrentToInputWeights->GetTensorInfo();
971 paramsInfo.m_InputGateBias = &cLayer->m_CifgParameters.m_InputGateBias->GetTensorInfo();
972 }
973
974 if(descriptor.m_ProjectionEnabled)
975 {
976 paramsInfo.m_ProjectionWeights = &cLayer->m_ProjectionParameters.m_ProjectionWeights->GetTensorInfo();
James Conroyed324052020-05-18 15:16:42 +0100977
978 // Projection bias is optional even if projection is enabled
979 if (cLayer->m_ProjectionParameters.m_ProjectionBias != nullptr)
980 {
981 paramsInfo.m_ProjectionBias = &cLayer->m_ProjectionParameters.m_ProjectionBias->GetTensorInfo();
982 }
James Conroy586a9aa2020-03-20 08:49:33 +0000983 }
984
985 if(descriptor.m_PeepholeEnabled)
986 {
987 if (!descriptor.m_CifgEnabled)
988 {
989 paramsInfo.m_CellToInputWeights =
990 &cLayer->m_PeepholeParameters.m_CellToInputWeights->GetTensorInfo();
991 }
992
993 paramsInfo.m_CellToForgetWeights =
994 &cLayer->m_PeepholeParameters.m_CellToForgetWeights->GetTensorInfo();
995 paramsInfo.m_CellToOutputWeights = &cLayer->m_PeepholeParameters.m_CellToOutputWeights->GetTensorInfo();
996 }
997
998 if(descriptor.m_LayerNormEnabled)
999 {
1000 if (!descriptor.m_CifgEnabled)
1001 {
1002 paramsInfo.m_InputLayerNormWeights =
1003 &cLayer->m_LayerNormParameters.m_InputLayerNormWeights->GetTensorInfo();
1004 }
1005
1006 paramsInfo.m_ForgetLayerNormWeights =
1007 &cLayer->m_LayerNormParameters.m_ForgetLayerNormWeights->GetTensorInfo();
1008 paramsInfo.m_CellLayerNormWeights =
1009 &cLayer->m_LayerNormParameters.m_CellLayerNormWeights->GetTensorInfo();
1010 paramsInfo.m_OutputLayerNormWeights =
1011 &cLayer->m_LayerNormParameters.m_OutputLayerNormWeights->GetTensorInfo();
1012 }
1013
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001014 result = layerSupportObject.IsQLstmSupported(input,
1015 previousOutputIn,
1016 previousCellStateIn,
1017 outputStateOut,
1018 cellStateOut,
1019 output,
1020 descriptor,
1021 paramsInfo,
1022 reason);
James Conroy586a9aa2020-03-20 08:49:33 +00001023 break;
1024 }
James Conroyee18dc82019-07-17 11:27:46 +01001025 case LayerType::QuantizedLstm:
1026 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001027 auto cLayer = PolymorphicDowncast<const QuantizedLstmLayer*>(&layer);
James Conroyee18dc82019-07-17 11:27:46 +01001028
1029 // Inputs
Ferran Balaguer737d9ff2019-08-01 09:58:08 +01001030 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1031 const TensorInfo& previousCellStateIn = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
1032 const TensorInfo& previousOutputIn = layer.GetInputSlot(2).GetConnection()->GetTensorInfo();
James Conroyee18dc82019-07-17 11:27:46 +01001033
1034 // Outputs
Ferran Balaguer737d9ff2019-08-01 09:58:08 +01001035 const TensorInfo& cellStateOut = layer.GetOutputSlot(0).GetTensorInfo();
1036 const TensorInfo& output = layer.GetOutputSlot(1).GetTensorInfo();
James Conroyee18dc82019-07-17 11:27:46 +01001037
1038 // QuantizedLstm parameters
James Conroyee18dc82019-07-17 11:27:46 +01001039 QuantizedLstmInputParamsInfo paramsInfo;
1040
Ferran Balaguer737d9ff2019-08-01 09:58:08 +01001041 paramsInfo.m_InputToInputWeights =
1042 &cLayer->m_QuantizedLstmParameters.m_InputToInputWeights->GetTensorInfo();
1043 paramsInfo.m_InputToForgetWeights =
1044 &cLayer->m_QuantizedLstmParameters.m_InputToForgetWeights->GetTensorInfo();
1045 paramsInfo.m_InputToCellWeights =
1046 &cLayer->m_QuantizedLstmParameters.m_InputToCellWeights->GetTensorInfo();
1047 paramsInfo.m_InputToOutputWeights =
1048 &cLayer->m_QuantizedLstmParameters.m_InputToOutputWeights->GetTensorInfo();
James Conroyee18dc82019-07-17 11:27:46 +01001049
Ferran Balaguer737d9ff2019-08-01 09:58:08 +01001050 paramsInfo.m_RecurrentToInputWeights =
1051 &cLayer->m_QuantizedLstmParameters.m_RecurrentToInputWeights->GetTensorInfo();
1052 paramsInfo.m_RecurrentToForgetWeights =
1053 &cLayer->m_QuantizedLstmParameters.m_RecurrentToForgetWeights->GetTensorInfo();
1054 paramsInfo.m_RecurrentToCellWeights =
1055 &cLayer->m_QuantizedLstmParameters.m_RecurrentToCellWeights->GetTensorInfo();
1056 paramsInfo.m_RecurrentToOutputWeights =
1057 &cLayer->m_QuantizedLstmParameters.m_RecurrentToOutputWeights->GetTensorInfo();
James Conroyee18dc82019-07-17 11:27:46 +01001058
Ferran Balaguer737d9ff2019-08-01 09:58:08 +01001059 paramsInfo.m_InputGateBias =
1060 &cLayer->m_QuantizedLstmParameters.m_InputGateBias->GetTensorInfo();
1061 paramsInfo.m_ForgetGateBias =
1062 &cLayer->m_QuantizedLstmParameters.m_ForgetGateBias->GetTensorInfo();
1063 paramsInfo.m_CellBias =
1064 &cLayer->m_QuantizedLstmParameters.m_CellBias->GetTensorInfo();
1065 paramsInfo.m_OutputGateBias =
1066 &cLayer->m_QuantizedLstmParameters.m_OutputGateBias->GetTensorInfo();;
James Conroyee18dc82019-07-17 11:27:46 +01001067
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001068 result = layerSupportObject.IsQuantizedLstmSupported(input,
1069 previousCellStateIn,
1070 previousOutputIn,
1071 cellStateOut,
1072 output,
1073 paramsInfo,
1074 reason);
James Conroyee18dc82019-07-17 11:27:46 +01001075 break;
1076 }
Francis Murtaghe7a86a42018-08-29 12:42:10 +01001077 case LayerType::Division:
1078 {
Mike Kelly3ec30772023-03-08 13:47:17 +00001079 ARMNN_NO_DEPRECATE_WARN_BEGIN
Francis Murtaghe7a86a42018-08-29 12:42:10 +01001080 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1081 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
1082 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001083 result = layerSupportObject.IsDivisionSupported(
Francis Murtaghe7a86a42018-08-29 12:42:10 +01001084 OverrideDataType(input0, dataType),
1085 OverrideDataType(input1, dataType),
1086 OverrideDataType(output, dataType),
David Beck33f0ae02018-10-18 15:13:56 +01001087 reason);
Mike Kelly3ec30772023-03-08 13:47:17 +00001088 ARMNN_NO_DEPRECATE_WARN_END
Francis Murtaghe7a86a42018-08-29 12:42:10 +01001089 break;
1090 }
Finn Williams2605b232020-06-10 15:53:46 +01001091 case LayerType::Rank:
1092 {
1093 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1094 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001095 result = layerSupportObject.IsRankSupported(OverrideDataType(input, dataType),
1096 OverrideDataType(output, dataType),
1097 reason);
Finn Williams2605b232020-06-10 15:53:46 +01001098 break;
1099 }
telsoa014fcda012018-03-09 14:13:49 +00001100 case LayerType::Reshape:
1101 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001102 auto cLayer = PolymorphicDowncast<const ReshapeLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +00001103 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
Kevin Maya023c402019-12-12 17:28:05 +00001104 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001105 result = layerSupportObject.IsReshapeSupported(OverrideDataType(input, dataType),
1106 OverrideDataType(output, dataType),
1107 cLayer->GetParameters(),
1108 reason);
telsoa014fcda012018-03-09 14:13:49 +00001109 break;
1110 }
Teresa Charlina9075df2019-06-27 15:41:57 +01001111 case LayerType::Resize:
1112 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001113 auto cLayer = PolymorphicDowncast<const ResizeLayer*>(&layer);
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01001114 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
Teresa Charlina9075df2019-06-27 15:41:57 +01001115 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001116 result = layerSupportObject.IsResizeSupported(OverrideDataType(input, dataType),
1117 OverrideDataType(output, dataType),
1118 cLayer->GetParameters(),
1119 reason);
Teresa Charlina9075df2019-06-27 15:41:57 +01001120 break;
1121 }
Keith Davis3ae3f972021-05-21 16:33:48 +01001122 case LayerType::Shape:
1123 {
1124 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1125 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
1126
1127 result = layerSupportObject.IsShapeSupported(OverrideDataType(input, dataType),
1128 OverrideDataType(output, dataType),
1129 reason);
1130 break;
1131 }
Aron Virginas-Tar636ab402019-09-16 14:27:45 +01001132 case LayerType::Slice:
1133 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001134 auto cLayer = PolymorphicDowncast<const SliceLayer*>(&layer);
Aron Virginas-Tar636ab402019-09-16 14:27:45 +01001135
1136 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1137 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
1138
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001139 result = layerSupportObject.IsSliceSupported(OverrideDataType(input, dataType),
1140 OverrideDataType(output, dataType),
1141 cLayer->GetParameters(),
1142 reason);
Aron Virginas-Tar636ab402019-09-16 14:27:45 +01001143 break;
1144 }
telsoa014fcda012018-03-09 14:13:49 +00001145 case LayerType::Softmax:
1146 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001147 auto cLayer = PolymorphicDowncast<const SoftmaxLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +00001148 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +01001149 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001150 result = layerSupportObject.IsSoftmaxSupported(OverrideDataType(input, dataType),
1151 OverrideDataType(output, dataType),
1152 cLayer->GetParameters(),
1153 reason);
telsoa014fcda012018-03-09 14:13:49 +00001154 break;
1155 }
Nattapat Chaimanowong207ef9a2018-11-02 10:57:25 +00001156 case LayerType::SpaceToBatchNd:
1157 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001158 auto cLayer = PolymorphicDowncast<const SpaceToBatchNdLayer*>(&layer);
Nattapat Chaimanowong207ef9a2018-11-02 10:57:25 +00001159 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1160 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001161 result = layerSupportObject.IsSpaceToBatchNdSupported(OverrideDataType(input, dataType),
1162 OverrideDataType(output, dataType),
1163 cLayer->GetParameters(),
1164 reason);
Nattapat Chaimanowong207ef9a2018-11-02 10:57:25 +00001165 break;
1166 }
Aron Virginas-Tar972af152019-06-11 14:14:03 +01001167 case LayerType::SpaceToDepth:
1168 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001169 auto cLayer = PolymorphicDowncast<const SpaceToDepthLayer*>(&layer);
Aron Virginas-Tar972af152019-06-11 14:14:03 +01001170
1171 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1172 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
1173
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001174 result = layerSupportObject.IsSpaceToDepthSupported(OverrideDataType(input, dataType),
1175 OverrideDataType(output, dataType),
1176 cLayer->GetParameters(),
1177 reason);
Aron Virginas-Tar972af152019-06-11 14:14:03 +01001178 break;
1179 }
telsoa014fcda012018-03-09 14:13:49 +00001180 case LayerType::Splitter:
1181 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001182 auto cLayer = PolymorphicDowncast<const SplitterLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +00001183 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
Narumol Prangnawarat15eb5832019-05-20 15:31:05 +01001184
1185 // Get vector of all outputs.
1186 auto getTensorInfo = [&dataType](const OutputSlot& slot)
1187 {
1188 return OverrideDataType(slot.GetTensorInfo(), dataType);
1189 };
Finn Williams3e54d032020-10-22 16:53:35 +01001190 auto beginI = MakeTransformIterator(layer.GetOutputSlots().begin(), getTensorInfo);
1191 auto endI = MakeTransformIterator(layer.GetOutputSlots().end(), getTensorInfo);
Narumol Prangnawarat15eb5832019-05-20 15:31:05 +01001192 std::vector<TensorInfo> outputs(beginI, endI);
1193
1194 const std::vector<std::reference_wrapper<TensorInfo>> outputPtrs(outputs.begin(), outputs.end());
1195
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001196 result = layerSupportObject.IsSplitterSupported(OverrideDataType(input, dataType),
1197 outputPtrs,
1198 cLayer->GetParameters(),
1199 reason);
telsoa014fcda012018-03-09 14:13:49 +00001200 break;
1201 }
Matthew Jackson2b8c1da2019-07-04 14:59:16 +01001202 case LayerType::Stack:
1203 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001204 auto cLayer = PolymorphicDowncast<const StackLayer*>(&layer);
Matthew Jackson2b8c1da2019-07-04 14:59:16 +01001205
1206 // Get vector of all inputs.
1207 auto getTensorInfo = [&dataType](const InputSlot& slot)
1208 {
1209 return OverrideDataType(slot.GetConnectedOutputSlot()->GetTensorInfo(), dataType);
1210 };
Finn Williams3e54d032020-10-22 16:53:35 +01001211 auto beginI = MakeTransformIterator(layer.GetInputSlots().begin(), getTensorInfo);
1212 auto endI = MakeTransformIterator(layer.GetInputSlots().end(), getTensorInfo);
Matthew Jackson2b8c1da2019-07-04 14:59:16 +01001213 std::vector<TensorInfo> inputs(beginI, endI);
1214
1215 auto getTensorInfoPtr = [](const TensorInfo& info)
1216 {
1217 return &info;
1218 };
Finn Williams3e54d032020-10-22 16:53:35 +01001219 auto beginPtr = MakeTransformIterator(inputs.begin(), getTensorInfoPtr);
1220 auto endPtr = MakeTransformIterator(inputs.end(), getTensorInfoPtr);
Matthew Jackson2b8c1da2019-07-04 14:59:16 +01001221 std::vector<const TensorInfo*> inputPtrs(beginPtr, endPtr);
1222
1223 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
1224
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001225 result = layerSupportObject.IsStackSupported(inputPtrs, output, cLayer->GetParameters(), reason);
Matthew Jackson2b8c1da2019-07-04 14:59:16 +01001226
1227 break;
1228 }
Derek Lamberti013c3902019-10-21 10:46:16 +01001229 case LayerType::StandIn:
1230 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001231 auto cLayer = PolymorphicDowncast<const StandInLayer*>(&layer);
Derek Lamberti013c3902019-10-21 10:46:16 +01001232
1233 // Get vector of all inputs.
1234 auto getTensorInfoIn = [&dataType](const InputSlot& slot)
1235 {
1236 return OverrideDataType(slot.GetConnectedOutputSlot()->GetTensorInfo(), dataType);
1237 };
1238 auto getTensorInfoOut = [&dataType](const OutputSlot& slot)
1239 {
1240 return OverrideDataType(slot.GetTensorInfo(), dataType);
1241 };
Finn Williams3e54d032020-10-22 16:53:35 +01001242 auto beginI = MakeTransformIterator(layer.GetInputSlots().begin(), getTensorInfoIn);
1243 auto endI = MakeTransformIterator(layer.GetInputSlots().end(), getTensorInfoIn);
Derek Lamberti013c3902019-10-21 10:46:16 +01001244 std::vector<TensorInfo> inputs(beginI, endI);
1245
Finn Williams3e54d032020-10-22 16:53:35 +01001246 auto beginO = MakeTransformIterator(layer.GetOutputSlots().begin(), getTensorInfoOut);
1247 auto endO = MakeTransformIterator(layer.GetOutputSlots().end(), getTensorInfoOut);
Derek Lamberti013c3902019-10-21 10:46:16 +01001248 std::vector<TensorInfo> outputs(beginO, endO);
1249
1250
1251 auto getTensorInfoPtr = [](const TensorInfo& info)
1252 {
1253 return &info;
1254 };
Finn Williams3e54d032020-10-22 16:53:35 +01001255 auto beginPtrI = MakeTransformIterator(inputs.begin(), getTensorInfoPtr);
1256 auto endPtrI = MakeTransformIterator(inputs.end(), getTensorInfoPtr);
Derek Lamberti013c3902019-10-21 10:46:16 +01001257 std::vector<const TensorInfo*> inputPtrs(beginPtrI, endPtrI);
1258
Finn Williams3e54d032020-10-22 16:53:35 +01001259 auto beginPtrO = MakeTransformIterator(outputs.begin(), getTensorInfoPtr);
1260 auto endPtrO = MakeTransformIterator(outputs.end(), getTensorInfoPtr);
Derek Lamberti013c3902019-10-21 10:46:16 +01001261 std::vector<const TensorInfo*> outputPtrs(beginPtrO, endPtrO);
1262
1263
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001264 result = layerSupportObject.IsStandInSupported(inputPtrs,
1265 outputPtrs,
1266 cLayer->GetParameters(),
1267 reason);
Derek Lamberti013c3902019-10-21 10:46:16 +01001268 break;
1269 }
Conor Kennedy430b5d82018-11-14 15:28:28 +00001270 case LayerType::StridedSlice:
1271 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001272 auto cLayer = PolymorphicDowncast<const StridedSliceLayer*>(&layer);
Conor Kennedy430b5d82018-11-14 15:28:28 +00001273 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1274 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001275 result = layerSupportObject.IsStridedSliceSupported(OverrideDataType(input, dataType),
1276 OverrideDataType(output, dataType),
1277 cLayer->GetParameters(),
1278 reason);
Conor Kennedy430b5d82018-11-14 15:28:28 +00001279 break;
1280 }
David Beckc2044fe2018-09-05 15:00:38 +01001281 case LayerType::Subtraction:
1282 {
Mike Kelly3ec30772023-03-08 13:47:17 +00001283 ARMNN_NO_DEPRECATE_WARN_BEGIN
David Beckc2044fe2018-09-05 15:00:38 +01001284 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1285 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
1286 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001287 result = layerSupportObject.IsSubtractionSupported(
David Beckc2044fe2018-09-05 15:00:38 +01001288 OverrideDataType(input0, dataType),
1289 OverrideDataType(input1, dataType),
1290 OverrideDataType(output, dataType),
David Beck33f0ae02018-10-18 15:13:56 +01001291 reason);
Mike Kelly3ec30772023-03-08 13:47:17 +00001292 ARMNN_NO_DEPRECATE_WARN_END
David Beckc2044fe2018-09-05 15:00:38 +01001293 break;
1294 }
Sadik Armaganeff363d2019-04-05 15:25:46 +01001295 case LayerType::Switch:
1296 {
1297 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1298 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
1299 const TensorInfo& output0 = layer.GetOutputSlot(0).GetTensorInfo();
1300 const TensorInfo& output1 = layer.GetOutputSlot(1).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001301 result = layerSupportObject.IsSwitchSupported(OverrideDataType(input0, dataType),
1302 OverrideDataType(input1, dataType),
1303 OverrideDataType(output0, dataType),
1304 OverrideDataType(output1, dataType),
1305 reason);
Sadik Armaganeff363d2019-04-05 15:25:46 +01001306 break;
1307 }
narpra0132b90462018-09-13 11:07:48 +01001308 case LayerType::Mean:
1309 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001310 auto cLayer = PolymorphicDowncast<const MeanLayer*>(&layer);
narpra0132b90462018-09-13 11:07:48 +01001311 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1312 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001313 result = layerSupportObject.IsMeanSupported(
narpra0132b90462018-09-13 11:07:48 +01001314 OverrideDataType(input, dataType),
1315 OverrideDataType(output, dataType),
1316 cLayer->GetParameters(),
David Beck33f0ae02018-10-18 15:13:56 +01001317 reason);
narpra0132b90462018-09-13 11:07:48 +01001318 break;
1319 }
kevmay0190539692018-11-29 08:40:19 +00001320 case LayerType::Minimum:
1321 {
Mike Kelly3ec30772023-03-08 13:47:17 +00001322 ARMNN_NO_DEPRECATE_WARN_BEGIN
kevmay0190539692018-11-29 08:40:19 +00001323 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1324 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
1325 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001326 result = layerSupportObject.IsMinimumSupported(OverrideDataType(input0, dataType),
1327 OverrideDataType(input1, dataType),
1328 OverrideDataType(output, dataType),
1329 reason);
Mike Kelly3ec30772023-03-08 13:47:17 +00001330 ARMNN_NO_DEPRECATE_WARN_END
kevmay0190539692018-11-29 08:40:19 +00001331 break;
1332 }
Matteo Martincigh0e406ee2019-06-12 15:42:18 +01001333 case LayerType::Prelu:
1334 {
1335 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1336 const TensorInfo& alpha = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
1337 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001338 result = layerSupportObject.IsPreluSupported(OverrideDataType(input, dataType),
1339 OverrideDataType(alpha, dataType),
1340 OverrideDataType(output, dataType),
1341 reason);
Matteo Martincigh0e406ee2019-06-12 15:42:18 +01001342 break;
1343 }
Mike Kellyc9ea45a2020-02-28 18:11:58 +00001344 case LayerType::Transpose:
1345 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001346 auto cLayer = PolymorphicDowncast<const TransposeLayer*>(&layer);
Mike Kellyc9ea45a2020-02-28 18:11:58 +00001347 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1348 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001349 result = layerSupportObject.IsTransposeSupported(OverrideDataType(input, dataType),
1350 OverrideDataType(output, dataType),
1351 cLayer->GetParameters(),
1352 reason);
Mike Kellyc9ea45a2020-02-28 18:11:58 +00001353 break;
1354 }
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01001355 case LayerType::TransposeConvolution2d:
1356 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001357 auto cLayer = PolymorphicDowncast<const TransposeConvolution2dLayer*>(&layer);
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01001358
1359 const TensorInfo input = OverrideDataType(layer.GetInputSlot(0).GetConnection()->GetTensorInfo(),
1360 dataType);
1361 const TensorInfo output = OverrideDataType(layer.GetOutputSlot(0).GetTensorInfo(), dataType);
1362
1363 const TransposeConvolution2dDescriptor& descriptor = cLayer->GetParameters();
1364
1365 Optional<TensorInfo> biases;
1366 if (descriptor.m_BiasEnabled)
1367 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001368 ARMNN_ASSERT(cLayer->m_Bias.get() != nullptr);
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01001369 biases = OverrideDataType(cLayer->m_Bias->GetTensorInfo(),
1370 GetBiasTypeFromWeightsType(dataType));
1371 }
1372
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001373 ARMNN_ASSERT(cLayer->m_Weight.get() != nullptr);
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01001374 const TensorInfo weights = OverrideDataType(cLayer->m_Weight->GetTensorInfo(), dataType);
1375
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001376 result = layerSupportObject.IsTransposeConvolution2dSupported(input,
1377 output,
1378 descriptor,
1379 weights,
1380 biases,
1381 reason);
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01001382
1383 break;
1384 }
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +00001385 case LayerType::Reduce:
1386 {
1387 auto cLayer = PolymorphicDowncast<const ReduceLayer*>(&layer);
1388 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1389 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
1390
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001391 result = layerSupportObject.IsReduceSupported(OverrideDataType(input, dataType),
1392 OverrideDataType(output, dataType),
1393 cLayer->GetParameters(),
1394 reason);
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +00001395 break;
1396 }
Narumol Prangnawarat8ed39ae2021-07-15 16:16:25 +01001397 case LayerType::UnidirectionalSequenceLstm:
1398 {
1399 auto cLayer = PolymorphicDowncast<const UnidirectionalSequenceLstmLayer*>(&layer);
1400 const UnidirectionalSequenceLstmDescriptor& descriptor = cLayer->GetParameters();
1401
1402 // All inputs.
1403 const TensorInfo& input = OverrideDataType(layer.GetInputSlot(0).GetConnection()->GetTensorInfo(),
1404 dataType);
1405 const TensorInfo& outputStateIn = OverrideDataType(layer.GetInputSlot(1).GetConnection()->GetTensorInfo(),
1406 dataType);
1407 const TensorInfo& cellStateIn = OverrideDataType(layer.GetInputSlot(2).GetConnection()->GetTensorInfo(),
1408 dataType);
1409 // Outputs
Mike Kelly12994962022-04-21 11:57:09 +01001410 const TensorInfo& outputStateOut = OverrideDataType(layer.GetOutputSlot(0).GetTensorInfo(), dataType);
1411 const TensorInfo& cellStateOut = OverrideDataType(layer.GetOutputSlot(1).GetTensorInfo(), dataType);
1412 const TensorInfo& output = OverrideDataType(layer.GetOutputSlot(2).GetTensorInfo(), dataType);
Narumol Prangnawarat8ed39ae2021-07-15 16:16:25 +01001413
1414 // Basic parameters
1415 const TensorInfo& inputToForgetWeights
1416 = OverrideDataType(cLayer->m_BasicParameters.m_InputToForgetWeights->GetTensorInfo(), dataType);
1417 const TensorInfo& inputToCellWeights
1418 = OverrideDataType(cLayer->m_BasicParameters.m_InputToCellWeights->GetTensorInfo(), dataType);
1419 const TensorInfo& inputToOutputWeights
1420 = OverrideDataType(cLayer->m_BasicParameters.m_InputToOutputWeights->GetTensorInfo(), dataType);
1421 const TensorInfo& recurrentToForgetWeights
1422 = OverrideDataType(cLayer->m_BasicParameters.m_RecurrentToForgetWeights->GetTensorInfo(), dataType);
1423 const TensorInfo& recurrentToCellWeights
1424 = OverrideDataType(cLayer->m_BasicParameters.m_RecurrentToCellWeights->GetTensorInfo(), dataType);
1425 const TensorInfo& recurrentToOutputWeights
1426 = OverrideDataType(cLayer->m_BasicParameters.m_RecurrentToOutputWeights->GetTensorInfo(), dataType);
1427 const TensorInfo& forgetGateBias
1428 = OverrideDataType(cLayer->m_BasicParameters.m_ForgetGateBias->GetTensorInfo(), dataType);
1429 const TensorInfo& cellBias
1430 = OverrideDataType(cLayer->m_BasicParameters.m_CellBias->GetTensorInfo(), dataType);
1431 const TensorInfo& outputGateBias
1432 = OverrideDataType(cLayer->m_BasicParameters.m_OutputGateBias->GetTensorInfo(), dataType);
1433
1434 LstmInputParamsInfo paramsInfo;
1435
1436 paramsInfo.m_InputToForgetWeights = &inputToForgetWeights;
1437 paramsInfo.m_InputToCellWeights = &inputToCellWeights;
1438 paramsInfo.m_InputToOutputWeights = &inputToOutputWeights;
1439 paramsInfo.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
1440 paramsInfo.m_RecurrentToCellWeights = &recurrentToCellWeights;
1441 paramsInfo.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
1442 paramsInfo.m_ForgetGateBias = &forgetGateBias;
1443 paramsInfo.m_CellBias = &cellBias;
1444 paramsInfo.m_OutputGateBias = &outputGateBias;
1445
1446 // Optional parameters
1447 TensorInfo optInputToInputWeights;
1448 TensorInfo optRecurrentToInputWeights;
1449 TensorInfo optCellToInputWeights;
1450 TensorInfo optInputGateBias;
1451 TensorInfo optProjectionWeights;
1452 TensorInfo optProjectionBias;
1453 TensorInfo optCellToForgetWeights;
1454 TensorInfo optCellToOutputWeights;
1455 TensorInfo optInputLayerNormWeights;
1456 TensorInfo optForgetLayerNormWeights;
1457 TensorInfo optCellLayerNormWeights;
1458 TensorInfo optOutputLayerNormWeights;
1459
1460 if(!descriptor.m_CifgEnabled)
1461 {
1462 optInputToInputWeights =
1463 OverrideDataType(cLayer->m_CifgParameters.m_InputToInputWeights->GetTensorInfo(), dataType);
1464 paramsInfo.m_InputToInputWeights = &optInputToInputWeights;
1465
1466 optRecurrentToInputWeights =
1467 OverrideDataType(cLayer->m_CifgParameters.m_RecurrentToInputWeights->GetTensorInfo(), dataType);
1468 paramsInfo.m_RecurrentToInputWeights = &optRecurrentToInputWeights;
1469 optInputGateBias =
1470 OverrideDataType(cLayer->m_CifgParameters.m_InputGateBias->GetTensorInfo(), dataType);
1471 paramsInfo.m_InputGateBias = &optInputGateBias;
1472 }
1473
1474 if(descriptor.m_ProjectionEnabled)
1475 {
1476 optProjectionWeights =
1477 OverrideDataType(cLayer->m_ProjectionParameters.m_ProjectionWeights->GetTensorInfo(), dataType);
1478 paramsInfo.m_ProjectionWeights = &optProjectionWeights;
1479 if (cLayer->m_ProjectionParameters.m_ProjectionBias != nullptr)
1480 {
1481 optProjectionBias =
1482 OverrideDataType(cLayer->m_ProjectionParameters.m_ProjectionBias->GetTensorInfo(), dataType);
1483 paramsInfo.m_ProjectionBias = &optProjectionBias;
1484 }
1485 }
1486
1487 if(descriptor.m_PeepholeEnabled)
1488 {
1489 if(!descriptor.m_CifgEnabled)
1490 {
1491 optCellToInputWeights =
1492 OverrideDataType(cLayer->m_PeepholeParameters.m_CellToInputWeights->GetTensorInfo(),
1493 dataType);
1494 paramsInfo.m_CellToInputWeights = &optCellToInputWeights;
1495 }
1496 optCellToForgetWeights =
1497 OverrideDataType(cLayer->m_PeepholeParameters.m_CellToForgetWeights->GetTensorInfo(), dataType);
1498 paramsInfo.m_CellToForgetWeights = &optCellToForgetWeights;
1499 optCellToOutputWeights =
1500 OverrideDataType(cLayer->m_PeepholeParameters.m_CellToOutputWeights->GetTensorInfo(), dataType);
1501 paramsInfo.m_CellToOutputWeights = &optCellToOutputWeights;
1502 }
1503
1504 if(descriptor.m_LayerNormEnabled)
1505 {
1506 if (!descriptor.m_CifgEnabled)
1507 {
1508 optInputLayerNormWeights = OverrideDataType(
1509 cLayer->m_LayerNormParameters.m_InputLayerNormWeights->GetTensorInfo(), dataType);
1510 paramsInfo.m_InputLayerNormWeights = &optInputLayerNormWeights;
1511 }
1512
1513 optForgetLayerNormWeights = OverrideDataType(
1514 cLayer->m_LayerNormParameters.m_ForgetLayerNormWeights->GetTensorInfo(), dataType);
1515 paramsInfo.m_ForgetLayerNormWeights = &optForgetLayerNormWeights;
1516
1517 optCellLayerNormWeights = OverrideDataType(
1518 cLayer->m_LayerNormParameters.m_CellLayerNormWeights->GetTensorInfo(), dataType);
1519 paramsInfo.m_CellLayerNormWeights = &optCellLayerNormWeights;
1520
1521 optOutputLayerNormWeights = OverrideDataType(
1522 cLayer->m_LayerNormParameters.m_OutputLayerNormWeights->GetTensorInfo(), dataType);
1523 paramsInfo.m_OutputLayerNormWeights = &optOutputLayerNormWeights;
1524 }
1525
Narumol Prangnawarat8ed39ae2021-07-15 16:16:25 +01001526 result = layerSupportObject.IsUnidirectionalSequenceLstmSupported(input,
1527 outputStateIn,
1528 cellStateIn,
Mike Kelly12994962022-04-21 11:57:09 +01001529 outputStateOut,
Narumol Prangnawarat8ed39ae2021-07-15 16:16:25 +01001530 cellStateOut,
Mike Kelly12994962022-04-21 11:57:09 +01001531 output,
Narumol Prangnawarat8ed39ae2021-07-15 16:16:25 +01001532 descriptor,
1533 paramsInfo,
1534 reason);
1535 break;
1536 }
telsoa014fcda012018-03-09 14:13:49 +00001537 default:
1538 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001539 ARMNN_ASSERT_MSG(false, "WorkloadFactory did not recognise type of layer.");
David Beck33f0ae02018-10-18 15:13:56 +01001540 reason.value() = "Unrecognised layer type";
telsoa014fcda012018-03-09 14:13:49 +00001541 result = false;
1542 break;
1543 }
1544 }
telsoa014fcda012018-03-09 14:13:49 +00001545 return result;
1546}
1547
Sadik Armagan045f6be2020-09-10 13:37:32 +01001548bool IWorkloadFactory::IsLayerSupported(const BackendId& backendId,
1549 const IConnectableLayer& connectableLayer,
1550 Optional<DataType> dataType,
1551 std::string& outReasonIfUnsupported)
1552{
1553 return IsLayerConfigurationSupported(backendId, connectableLayer, dataType, outReasonIfUnsupported);
1554}
1555
David Beckdcb751f2018-10-03 11:42:42 +01001556bool IWorkloadFactory::IsLayerSupported(const IConnectableLayer& connectableLayer,
David Beck29c75de2018-10-23 13:35:58 +01001557 Optional<DataType> dataType,
telsoa01c577f2c2018-08-31 09:22:23 +01001558 std::string& outReasonIfUnsupported)
telsoa014fcda012018-03-09 14:13:49 +00001559{
Jan Eilersbb446e52020-04-02 13:56:54 +01001560 auto layer = PolymorphicDowncast<const Layer*>(&connectableLayer);
Sadik Armagan045f6be2020-09-10 13:37:32 +01001561 return IsLayerConfigurationSupported(layer->GetBackendId(), connectableLayer, dataType, outReasonIfUnsupported);
1562}
1563
1564// TODO merge with defaulted modelOptions above
1565bool IWorkloadFactory::IsLayerSupported(const IConnectableLayer& connectableLayer,
1566 Optional<DataType> dataType,
1567 std::string& outReasonIfUnsupported,
1568 const ModelOptions& modelOptions)
1569{
1570 auto layer = PolymorphicDowncast<const Layer*>(&connectableLayer);
1571 return IsLayerConfigurationSupported(layer->GetBackendId(),
1572 connectableLayer,
1573 dataType,
1574 outReasonIfUnsupported,
1575 modelOptions);
telsoa014fcda012018-03-09 14:13:49 +00001576}
1577
Sadik Armagan04a72972020-09-14 15:44:18 +01001578bool IWorkloadFactory::IsLayerSupported(const BackendId& backendId,
1579 const IConnectableLayer& connectableLayer,
1580 Optional<DataType> dataType,
1581 std::string& outReasonIfUnsupported,
1582 const ModelOptions& modelOptions)
1583{
1584 return IsLayerConfigurationSupported(backendId,
1585 connectableLayer,
1586 dataType,
1587 outReasonIfUnsupported,
1588 modelOptions);
1589}
Teresa Charlin611c7fb2022-01-07 09:47:29 +00001590ARMNN_NO_DEPRECATE_WARN_BEGIN
1591std::unique_ptr<IWorkload> IWorkloadFactory::CreateWorkload(LayerType type,
1592 const QueueDescriptor& descriptor,
1593 const WorkloadInfo& info) const
1594{
1595 switch(type)
1596 {
1597 case LayerType::Activation :
1598 {
1599 auto activationQueueDescriptor = PolymorphicDowncast<const ActivationQueueDescriptor*>(&descriptor);
1600 return CreateActivation(*activationQueueDescriptor, info);
1601 }
1602 case LayerType::Addition :
1603 {
1604 auto additionQueueDescriptor = PolymorphicDowncast<const AdditionQueueDescriptor*>(&descriptor);
1605 return CreateAddition(*additionQueueDescriptor, info);
1606 }
1607 case LayerType::ArgMinMax :
1608 {
1609 auto argMinMaxQueueDescriptor = PolymorphicDowncast<const ArgMinMaxQueueDescriptor*>(&descriptor);
1610 return CreateArgMinMax(*argMinMaxQueueDescriptor, info);
1611 }
1612 case LayerType::BatchNormalization :
1613 {
1614 auto batchNormQueueDescriptor = PolymorphicDowncast<const BatchNormalizationQueueDescriptor*>(&descriptor);
1615 return CreateBatchNormalization(*batchNormQueueDescriptor, info);
1616 }
1617 case LayerType::BatchToSpaceNd :
1618 {
1619 auto batchToSpaceNdQueueDescriptor
1620 = PolymorphicDowncast<const BatchToSpaceNdQueueDescriptor*>(&descriptor);
1621 return CreateBatchToSpaceNd(*batchToSpaceNdQueueDescriptor, info);
1622 }
1623 case LayerType::Cast :
1624 {
1625 auto castQueueDescriptor = PolymorphicDowncast<const CastQueueDescriptor*>(&descriptor);
1626 return CreateCast(*castQueueDescriptor, info);
1627 }
1628 case LayerType::ChannelShuffle :
1629 {
1630 auto channelShuffleQueueDescriptor
1631 = PolymorphicDowncast<const ChannelShuffleQueueDescriptor*>(&descriptor);
1632 return CreateChannelShuffle(*channelShuffleQueueDescriptor, info);
1633 }
1634 case LayerType::Comparison :
1635 {
1636 auto comparisonQueueDescriptor = PolymorphicDowncast<const ComparisonQueueDescriptor*>(&descriptor);
1637 return CreateComparison(*comparisonQueueDescriptor, info);
1638 }
1639 case LayerType::Concat :
1640 {
1641 auto concatQueueDescriptor = PolymorphicDowncast<const ConcatQueueDescriptor*>(&descriptor);
1642 return CreateConcat(*concatQueueDescriptor, info);
1643 }
1644 case LayerType::Constant :
1645 {
1646 auto constantQueueDescriptor = PolymorphicDowncast<const ConstantQueueDescriptor*>(&descriptor);
1647 return CreateConstant(*constantQueueDescriptor, info);
1648 }
Teresa Charlin611c7fb2022-01-07 09:47:29 +00001649 case LayerType::ConvertFp16ToFp32:
1650 {
1651 auto convertFp16ToFp32QueueDescriptor
1652 = PolymorphicDowncast<const ConvertFp16ToFp32QueueDescriptor*>(&descriptor);
1653 return CreateConvertFp16ToFp32(*convertFp16ToFp32QueueDescriptor, info);
1654 }
Teresa Charlin611c7fb2022-01-07 09:47:29 +00001655 case LayerType::ConvertFp32ToFp16:
1656 {
1657 auto convertFp32ToFp16QueueDescriptor
1658 = PolymorphicDowncast<const ConvertFp32ToFp16QueueDescriptor*>(&descriptor);
1659 return CreateConvertFp32ToFp16(*convertFp32ToFp16QueueDescriptor, info);
1660 }
1661 case LayerType::Convolution2d:
1662 {
1663 auto convolution2dQueueDescriptor = PolymorphicDowncast<const Convolution2dQueueDescriptor*>(&descriptor);
1664 return CreateConvolution2d(*convolution2dQueueDescriptor, info);
1665 }
1666 case LayerType::Convolution3d:
1667 {
1668 auto convolution3dQueueDescriptor = PolymorphicDowncast<const Convolution3dQueueDescriptor*>(&descriptor);
1669 return CreateConvolution3d(*convolution3dQueueDescriptor, info);
1670 }
1671 case LayerType::Debug:
1672 {
1673 auto debugQueueDescriptor = PolymorphicDowncast<const DebugQueueDescriptor*>(&descriptor);
1674 return CreateDebug(*debugQueueDescriptor, info);
1675 }
1676 case LayerType::DepthToSpace:
1677 {
1678 auto depthToSpaceQueueDescriptor = PolymorphicDowncast<const DepthToSpaceQueueDescriptor*>(&descriptor);
1679 return CreateDepthToSpace(*depthToSpaceQueueDescriptor, info);
1680 }
1681 case LayerType::DepthwiseConvolution2d:
1682 {
1683 auto depthwiseConvolution2DQueueDescriptor
1684 = PolymorphicDowncast<const DepthwiseConvolution2dQueueDescriptor*>(&descriptor);
1685 return CreateDepthwiseConvolution2d(*depthwiseConvolution2DQueueDescriptor, info);
1686 }
1687 case LayerType::Dequantize:
1688 {
1689 auto dequantizeQueueDescriptor = PolymorphicDowncast<const DequantizeQueueDescriptor*>(&descriptor);
1690 return CreateDequantize(*dequantizeQueueDescriptor, info);
1691 }
1692 case LayerType::DetectionPostProcess:
1693 {
1694 auto detectionPostProcessQueueDescriptor
1695 = PolymorphicDowncast<const DetectionPostProcessQueueDescriptor*>(&descriptor);
1696 return CreateDetectionPostProcess(*detectionPostProcessQueueDescriptor, info);
1697 }
1698 case LayerType::Division:
1699 {
1700 auto divisionQueueDescriptor = PolymorphicDowncast<const DivisionQueueDescriptor*>(&descriptor);
1701 return CreateDivision(*divisionQueueDescriptor, info);
1702 }
Mike Kelly3ec30772023-03-08 13:47:17 +00001703 case LayerType::ElementwiseBinary:
1704 {
1705 auto queueDescriptor = PolymorphicDowncast<const ElementwiseBinaryQueueDescriptor*>(&descriptor);
1706 return CreateWorkload(LayerType::ElementwiseBinary, *queueDescriptor, info);
1707 }
Teresa Charlin611c7fb2022-01-07 09:47:29 +00001708 case LayerType::ElementwiseUnary:
1709 {
1710 auto elementwiseUnaryQueueDescriptor
1711 = PolymorphicDowncast<const ElementwiseUnaryQueueDescriptor*>(&descriptor);
1712 return CreateElementwiseUnary(*elementwiseUnaryQueueDescriptor, info);
1713
1714 }
1715 case LayerType::FakeQuantization:
1716 {
1717 auto fakeQuantizationQueueDescriptor
1718 = PolymorphicDowncast<const FakeQuantizationQueueDescriptor*>(&descriptor);
1719 return CreateFakeQuantization(*fakeQuantizationQueueDescriptor, info);
1720 }
1721 case LayerType::Fill:
1722 {
1723 auto fillQueueDescriptor = PolymorphicDowncast<const FillQueueDescriptor*>(&descriptor);
1724 return CreateFill(*fillQueueDescriptor, info);
1725 }
1726 case LayerType::Floor:
1727 {
1728 auto floorQueueDescriptor = PolymorphicDowncast<const FloorQueueDescriptor*>(&descriptor);
1729 return CreateFloor(*floorQueueDescriptor, info);
1730 }
1731 case LayerType::FullyConnected:
1732 {
1733 auto fullyConnectedQueueDescriptor
1734 = PolymorphicDowncast<const FullyConnectedQueueDescriptor*>(&descriptor);
1735 return CreateFullyConnected(*fullyConnectedQueueDescriptor, info);
1736 }
1737 case LayerType::Gather:
1738 {
1739 auto gatherQueueDescriptor = PolymorphicDowncast<const GatherQueueDescriptor*>(&descriptor);
1740 return CreateGather(*gatherQueueDescriptor, info);
1741 }
1742 case LayerType::Input:
1743 {
1744 auto inputQueueDescriptor = PolymorphicDowncast<const InputQueueDescriptor*>(&descriptor);
1745 return CreateInput(*inputQueueDescriptor, info);
1746 }
1747 case LayerType::InstanceNormalization:
1748 {
1749 auto instanceNormalizationQueueDescriptor
1750 = PolymorphicDowncast<const InstanceNormalizationQueueDescriptor*>(&descriptor);
1751 return CreateInstanceNormalization(*instanceNormalizationQueueDescriptor, info);
1752 }
1753 case LayerType::L2Normalization:
1754 {
1755 auto l2NormalizationQueueDescriptor
1756 = PolymorphicDowncast<const L2NormalizationQueueDescriptor*>(&descriptor);
1757 return CreateL2Normalization(*l2NormalizationQueueDescriptor, info);
1758 }
1759 case LayerType::LogicalBinary:
1760 {
1761 auto logicalBinaryQueueDescriptor = PolymorphicDowncast<const LogicalBinaryQueueDescriptor*>(&descriptor);
1762 return CreateLogicalBinary(*logicalBinaryQueueDescriptor, info);
1763 }
1764 case LayerType::LogSoftmax:
1765 {
1766 auto logSoftmaxQueueDescriptor = PolymorphicDowncast<const LogSoftmaxQueueDescriptor*>(&descriptor);
1767 return CreateLogSoftmax(*logSoftmaxQueueDescriptor, info);
1768 }
1769 case LayerType::Lstm:
1770 {
1771 auto lstmQueueDescriptor = PolymorphicDowncast<const LstmQueueDescriptor*>(&descriptor);
1772 return CreateLstm(*lstmQueueDescriptor, info);
1773 }
1774 case LayerType::Maximum:
1775 {
1776 auto maximumQueueDescriptor = PolymorphicDowncast<const MaximumQueueDescriptor*>(&descriptor);
1777 return CreateMaximum(*maximumQueueDescriptor, info);
1778 }
1779 case LayerType::Mean:
1780 {
1781 auto meanQueueDescriptor = PolymorphicDowncast<const MeanQueueDescriptor*>(&descriptor);
1782 return CreateMean(*meanQueueDescriptor, info);
1783 }
1784 case LayerType::MemCopy:
1785 {
1786 auto memCopyQueueDescriptor = PolymorphicDowncast<const MemCopyQueueDescriptor*>(&descriptor);
1787 return CreateMemCopy(*memCopyQueueDescriptor, info);
1788 }
1789 case LayerType::MemImport:
1790 {
1791 auto memImportQueueDescriptor = PolymorphicDowncast<const MemImportQueueDescriptor*>(&descriptor);
1792 return CreateMemImport(*memImportQueueDescriptor, info);
1793 }
1794 case LayerType::Minimum:
1795 {
1796 auto minimumQueueDescriptor = PolymorphicDowncast<const MinimumQueueDescriptor*>(&descriptor);
1797 return CreateMinimum(*minimumQueueDescriptor, info);
1798 }
1799 case LayerType::Multiplication:
1800 {
1801 auto multiplicationQueueDescriptor
1802 = PolymorphicDowncast<const MultiplicationQueueDescriptor*>(&descriptor);
1803 return CreateMultiplication(*multiplicationQueueDescriptor, info);
1804 }
1805 case LayerType::Normalization:
1806 {
1807 auto normalizationQueueDescriptor = PolymorphicDowncast<const NormalizationQueueDescriptor*>(&descriptor);
1808 return CreateNormalization(*normalizationQueueDescriptor, info);
1809 }
1810 case LayerType::Output:
1811 {
1812 auto outputQueueDescriptor = PolymorphicDowncast<const OutputQueueDescriptor*>(&descriptor);
1813 return CreateOutput(*outputQueueDescriptor, info);
1814 }
1815 case LayerType::Pad:
1816 {
1817 auto padQueueDescriptor = PolymorphicDowncast<const PadQueueDescriptor*>(&descriptor);
1818 return CreatePad(*padQueueDescriptor, info);
1819 }
1820 case LayerType::Permute:
1821 {
1822 auto permuteQueueDescriptor = PolymorphicDowncast<const PermuteQueueDescriptor*>(&descriptor);
1823 return CreatePermute(*permuteQueueDescriptor, info);
1824 }
1825 case LayerType::Pooling2d:
1826 {
1827 auto pooling2dQueueDescriptor = PolymorphicDowncast<const Pooling2dQueueDescriptor*>(&descriptor);
1828 return CreatePooling2d(*pooling2dQueueDescriptor, info);
1829 }
1830 case LayerType::Pooling3d:
1831 {
1832 auto pooling3dQueueDescriptor = PolymorphicDowncast<const Pooling3dQueueDescriptor*>(&descriptor);
1833 return CreatePooling3d(*pooling3dQueueDescriptor, info);
1834 }
1835 case LayerType::PreCompiled:
1836 {
1837 auto preCompiledQueueDescriptor = PolymorphicDowncast<const PreCompiledQueueDescriptor*>(&descriptor);
1838 return CreatePreCompiled(*preCompiledQueueDescriptor, info);
1839 }
1840 case LayerType::Prelu:
1841 {
1842 auto preluQueueDescriptor = PolymorphicDowncast<const PreluQueueDescriptor*>(&descriptor);
1843 return CreatePrelu(*preluQueueDescriptor, info);
1844 }
1845 case LayerType::QLstm:
1846 {
1847 auto qlstmQueueDescriptor = PolymorphicDowncast<const QLstmQueueDescriptor*>(&descriptor);
1848 return CreateQLstm(*qlstmQueueDescriptor, info);
1849 }
1850 case LayerType::Quantize:
1851 {
1852 auto quantizeQueueDescriptor = PolymorphicDowncast<const QuantizeQueueDescriptor*>(&descriptor);
1853 return CreateQuantize(*quantizeQueueDescriptor, info);
1854 }
1855 case LayerType::Rank:
1856 {
1857 auto rankQueueDescriptor = PolymorphicDowncast<const RankQueueDescriptor*>(&descriptor);
1858 return CreateRank(*rankQueueDescriptor, info);
1859 }
1860 case LayerType::Reduce:
1861 {
1862 auto reduceQueueDescriptor = PolymorphicDowncast<const ReduceQueueDescriptor*>(&descriptor);
1863 return CreateReduce(*reduceQueueDescriptor, info);
1864 }
1865 case LayerType::Reshape:
1866 {
1867 auto reshapeQueueDescriptor = PolymorphicDowncast<const ReshapeQueueDescriptor*>(&descriptor);
1868 return CreateReshape(*reshapeQueueDescriptor, info);
1869 }
1870 case LayerType::Resize:
1871 {
1872 auto resizeQueueDescriptor = PolymorphicDowncast<const ResizeQueueDescriptor*>(&descriptor);
1873 return CreateResize(*resizeQueueDescriptor, info);
1874 }
1875 case LayerType::Shape:
1876 {
1877 auto shapeQueueDescriptor = PolymorphicDowncast<const ShapeQueueDescriptor*>(&descriptor);
1878 return CreateShape(*shapeQueueDescriptor, info);
1879 }
1880 case LayerType::Slice:
1881 {
1882 auto sliceQueueDescriptor = PolymorphicDowncast<const SliceQueueDescriptor*>(&descriptor);
1883 return CreateSlice(*sliceQueueDescriptor, info);
1884 }
1885 case LayerType::Softmax:
1886 {
1887 auto softmaxQueueDescriptor = PolymorphicDowncast<const SoftmaxQueueDescriptor*>(&descriptor);
1888 return CreateSoftmax(*softmaxQueueDescriptor, info);
1889 }
1890 case LayerType::SpaceToBatchNd:
1891 {
1892 auto spaceToBatchNdQueueDescriptor
1893 = PolymorphicDowncast<const SpaceToBatchNdQueueDescriptor*>(&descriptor);
1894 return CreateSpaceToBatchNd(*spaceToBatchNdQueueDescriptor, info);
1895 }
1896 case LayerType::SpaceToDepth:
1897 {
1898 auto spaceToDepthQueueDescriptor = PolymorphicDowncast<const SpaceToDepthQueueDescriptor*>(&descriptor);
1899 return CreateSpaceToDepth(*spaceToDepthQueueDescriptor, info);
1900 }
1901 case LayerType::Splitter:
1902 {
1903 auto splitterQueueDescriptor = PolymorphicDowncast<const SplitterQueueDescriptor*>(&descriptor);
1904 return CreateSplitter(*splitterQueueDescriptor, info);
1905 }
1906 case LayerType::Stack:
1907 {
1908 auto stackQueueDescriptor = PolymorphicDowncast<const StackQueueDescriptor*>(&descriptor);
1909 return CreateStack(*stackQueueDescriptor, info);
1910 }
1911 case LayerType::StridedSlice:
1912 {
1913 auto stridedSliceQueueDescriptor = PolymorphicDowncast<const StridedSliceQueueDescriptor*>(&descriptor);
1914 return CreateStridedSlice(*stridedSliceQueueDescriptor, info);
1915 }
1916 case LayerType::Subtraction:
1917 {
1918 auto subtractionQueueDescriptor = PolymorphicDowncast<const SubtractionQueueDescriptor*>(&descriptor);
1919 return CreateSubtraction(*subtractionQueueDescriptor, info);
1920 }
1921 case LayerType::Transpose:
1922 {
1923 auto transposeQueueDescriptor = PolymorphicDowncast<const TransposeQueueDescriptor*>(&descriptor);
1924 return CreateTranspose(*transposeQueueDescriptor, info);
1925 }
1926 case LayerType::TransposeConvolution2d:
1927 {
1928 auto transposeConvolution2dQueueDescriptor
1929 = PolymorphicDowncast<const TransposeConvolution2dQueueDescriptor*>(&descriptor);
1930 return CreateTransposeConvolution2d(*transposeConvolution2dQueueDescriptor, info);
1931 }
1932 case LayerType::UnidirectionalSequenceLstm:
1933 {
1934 auto unidirectionalSequenceLstmQueueDescriptor
1935 = PolymorphicDowncast<const UnidirectionalSequenceLstmQueueDescriptor*>(&descriptor);
1936 return CreateUnidirectionalSequenceLstm(*unidirectionalSequenceLstmQueueDescriptor, info);
1937 }
1938 default:
1939 return nullptr;
1940 }
1941}
1942ARMNN_NO_DEPRECATE_WARN_END
Sadik Armagan04a72972020-09-14 15:44:18 +01001943
Derek Lamberti901ea112019-12-10 22:07:09 +00001944std::unique_ptr<IWorkload> IWorkloadFactory::CreateActivation(const ActivationQueueDescriptor& /*descriptor*/,
1945 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001946{
1947 return std::unique_ptr<IWorkload>();
1948}
1949
Derek Lamberti901ea112019-12-10 22:07:09 +00001950std::unique_ptr<IWorkload> IWorkloadFactory::CreateAddition(const AdditionQueueDescriptor& /*descriptor*/,
1951 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001952{
1953 return std::unique_ptr<IWorkload>();
1954}
1955
Derek Lamberti901ea112019-12-10 22:07:09 +00001956std::unique_ptr<IWorkload> IWorkloadFactory::CreateArgMinMax(const ArgMinMaxQueueDescriptor& /*descriptor*/,
1957 const WorkloadInfo& /*info*/) const
Nikhil Rajee391d52019-09-05 17:50:44 +01001958{
1959 return std::unique_ptr<IWorkload>();
1960}
1961
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001962std::unique_ptr<IWorkload> IWorkloadFactory::CreateBatchNormalization(
Derek Lamberti901ea112019-12-10 22:07:09 +00001963 const BatchNormalizationQueueDescriptor& /*descriptor*/, const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001964{
1965 return std::unique_ptr<IWorkload>();
1966}
1967
Derek Lamberti901ea112019-12-10 22:07:09 +00001968std::unique_ptr<IWorkload> IWorkloadFactory::CreateBatchToSpaceNd(const BatchToSpaceNdQueueDescriptor& /*desc*/,
1969 const WorkloadInfo& /*Info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001970{
1971 return std::unique_ptr<IWorkload>();
1972}
1973
mathad01b392e982021-04-07 12:07:30 +01001974std::unique_ptr<IWorkload> IWorkloadFactory::CreateCast(const CastQueueDescriptor& /*descriptor*/,
1975 const WorkloadInfo& /*info*/) const
1976{
1977 return std::unique_ptr<IWorkload>();
1978}
1979
Simon Obute51f67772021-09-03 15:50:13 +01001980std::unique_ptr<IWorkload> IWorkloadFactory::CreateChannelShuffle(const ChannelShuffleQueueDescriptor& /*descriptor*/,
1981 const WorkloadInfo& /*info*/) const
1982{
1983 return std::unique_ptr<IWorkload>();
1984}
1985
Derek Lamberti901ea112019-12-10 22:07:09 +00001986std::unique_ptr<IWorkload> IWorkloadFactory::CreateComparison(const ComparisonQueueDescriptor& /*descriptor*/,
1987 const WorkloadInfo& /*info*/) const
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001988{
1989 return std::unique_ptr<IWorkload>();
1990}
1991
Derek Lamberti901ea112019-12-10 22:07:09 +00001992std::unique_ptr<IWorkload> IWorkloadFactory::CreateConcat(const ConcatQueueDescriptor& /*descriptor*/,
1993 const WorkloadInfo& /*info*/) const
Jim Flynn4ed6c832019-05-20 11:02:46 +01001994{
1995 return std::unique_ptr<IWorkload>();
1996}
1997
Derek Lamberti901ea112019-12-10 22:07:09 +00001998std::unique_ptr<IWorkload> IWorkloadFactory::CreateConstant(const ConstantQueueDescriptor& /*descriptor*/,
1999 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002000{
2001 return std::unique_ptr<IWorkload>();
2002}
2003
Derek Lamberti901ea112019-12-10 22:07:09 +00002004std::unique_ptr<IWorkload> IWorkloadFactory::CreateConvertFp16ToFp32(const ConvertFp16ToFp32QueueDescriptor& /*desc*/,
2005 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002006{
2007 return std::unique_ptr<IWorkload>();
2008}
2009
Derek Lamberti901ea112019-12-10 22:07:09 +00002010std::unique_ptr<IWorkload> IWorkloadFactory::CreateConvertFp32ToFp16(const ConvertFp32ToFp16QueueDescriptor& /*desc*/,
2011 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002012{
2013 return std::unique_ptr<IWorkload>();
2014}
2015
Derek Lamberti901ea112019-12-10 22:07:09 +00002016std::unique_ptr<IWorkload> IWorkloadFactory::CreateConvolution2d(const Convolution2dQueueDescriptor& /*descriptor*/,
2017 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002018{
2019 return std::unique_ptr<IWorkload>();
2020}
2021
Matthew Sloyanb63a3112021-09-08 13:05:51 +01002022std::unique_ptr<IWorkload> IWorkloadFactory::CreateConvolution3d(const Convolution3dQueueDescriptor& /*descriptor*/,
2023 const WorkloadInfo& /*info*/) const
2024{
2025 return std::unique_ptr<IWorkload>();
2026}
2027
Derek Lamberti901ea112019-12-10 22:07:09 +00002028std::unique_ptr<IWorkload> IWorkloadFactory::CreateDebug(const DebugQueueDescriptor& /*descriptor*/,
2029 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002030{
2031 return std::unique_ptr<IWorkload>();
2032}
2033
Derek Lamberti901ea112019-12-10 22:07:09 +00002034std::unique_ptr<IWorkload> IWorkloadFactory::CreateDepthToSpace(const DepthToSpaceQueueDescriptor& /*descriptor*/,
2035 const WorkloadInfo& /*info*/) const
Aron Virginas-Tardd6247f2019-09-19 14:31:17 +01002036{
2037 return std::unique_ptr<IWorkload>();
2038}
2039
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002040std::unique_ptr<IWorkload> IWorkloadFactory::CreateDepthwiseConvolution2d(
Derek Lamberti901ea112019-12-10 22:07:09 +00002041 const DepthwiseConvolution2dQueueDescriptor& /*descriptor*/, const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002042{
2043 return std::unique_ptr<IWorkload>();
2044}
2045
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00002046std::unique_ptr<IWorkload> IWorkloadFactory::CreateDequantize(
Derek Lamberti901ea112019-12-10 22:07:09 +00002047 const DequantizeQueueDescriptor& /*descriptor*/, const WorkloadInfo& /*info*/) const
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00002048{
2049 return std::unique_ptr<IWorkload>();
2050}
2051
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002052std::unique_ptr<IWorkload> IWorkloadFactory::CreateDetectionPostProcess(
Derek Lamberti901ea112019-12-10 22:07:09 +00002053 const DetectionPostProcessQueueDescriptor& /*descriptor*/, const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002054{
2055 return std::unique_ptr<IWorkload>();
2056}
2057
Derek Lamberti901ea112019-12-10 22:07:09 +00002058std::unique_ptr<IWorkload> IWorkloadFactory::CreateDivision(const DivisionQueueDescriptor& /*descriptor*/,
2059 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002060{
2061 return std::unique_ptr<IWorkload>();
2062}
2063
josh minor4a3c6102020-01-06 16:40:46 -06002064std::unique_ptr<IWorkload> IWorkloadFactory::CreateElementwiseUnary(const ElementwiseUnaryQueueDescriptor& /*desc*/,
2065 const WorkloadInfo& /*info*/) const
2066{
2067 return std::unique_ptr<IWorkload>();
2068}
2069
Derek Lamberti901ea112019-12-10 22:07:09 +00002070std::unique_ptr<IWorkload> IWorkloadFactory::CreateFakeQuantization(const FakeQuantizationQueueDescriptor& /*desc*/,
2071 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002072{
2073 return std::unique_ptr<IWorkload>();
2074}
2075
Ryan OSheaec6c6802020-06-05 17:17:06 +01002076std::unique_ptr<IWorkload> IWorkloadFactory::CreateFill(const FillQueueDescriptor& /*descriptor*/,
2077 const WorkloadInfo& /*info*/) const
2078{
2079 return std::unique_ptr<IWorkload>();
2080}
2081
Derek Lamberti901ea112019-12-10 22:07:09 +00002082std::unique_ptr<IWorkload> IWorkloadFactory::CreateFloor(const FloorQueueDescriptor& /*descriptor*/,
2083 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002084{
2085 return std::unique_ptr<IWorkload>();
2086}
2087
Derek Lamberti901ea112019-12-10 22:07:09 +00002088std::unique_ptr<IWorkload> IWorkloadFactory::CreateFullyConnected(const FullyConnectedQueueDescriptor& /*descriptor*/,
2089 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002090{
2091 return std::unique_ptr<IWorkload>();
2092}
2093
Derek Lamberti901ea112019-12-10 22:07:09 +00002094std::unique_ptr<IWorkload> IWorkloadFactory::CreateGather(const GatherQueueDescriptor& /*descriptor*/,
2095 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002096{
2097 return std::unique_ptr<IWorkload>();
2098}
2099
Kevin Mayce5045a2019-10-02 14:07:47 +01002100std::unique_ptr<IWorkload> IWorkloadFactory::CreateInstanceNormalization(
Derek Lamberti901ea112019-12-10 22:07:09 +00002101 const InstanceNormalizationQueueDescriptor& /*descriptor*/,
2102 const WorkloadInfo& /*info*/) const
Kevin Mayce5045a2019-10-02 14:07:47 +01002103{
2104 return std::unique_ptr<IWorkload>();
2105}
2106
Derek Lamberti901ea112019-12-10 22:07:09 +00002107std::unique_ptr<IWorkload> IWorkloadFactory::CreateL2Normalization(const L2NormalizationQueueDescriptor& /*desc*/,
2108 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002109{
2110 return std::unique_ptr<IWorkload>();
2111}
2112
James Conroyaba90cd2020-11-06 16:28:18 +00002113std::unique_ptr<IWorkload> IWorkloadFactory::CreateLogicalBinary(const LogicalBinaryQueueDescriptor& /*desc*/,
2114 const WorkloadInfo& /*info*/) const
2115{
2116 return std::unique_ptr<IWorkload>();
2117}
2118
2119std::unique_ptr<IWorkload> IWorkloadFactory::CreateLogicalUnary(const ElementwiseUnaryQueueDescriptor& /*desc*/,
2120 const WorkloadInfo& /*info*/) const
2121{
2122 return std::unique_ptr<IWorkload>();
2123}
2124
Derek Lamberti901ea112019-12-10 22:07:09 +00002125std::unique_ptr<IWorkload> IWorkloadFactory::CreateLogSoftmax(const LogSoftmaxQueueDescriptor& /*descriptor*/,
2126 const WorkloadInfo& /*info*/) const
Aron Virginas-Tarf982dea2019-10-11 14:07:53 +01002127{
2128 return std::unique_ptr<IWorkload>();
2129}
2130
Derek Lamberti901ea112019-12-10 22:07:09 +00002131std::unique_ptr<IWorkload> IWorkloadFactory::CreateLstm(const LstmQueueDescriptor& /*descriptor*/,
2132 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002133{
2134 return std::unique_ptr<IWorkload>();
2135}
2136
Derek Lamberti901ea112019-12-10 22:07:09 +00002137std::unique_ptr<IWorkload> IWorkloadFactory::CreateMaximum(const MaximumQueueDescriptor& /*descriptor*/,
2138 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002139{
2140 return std::unique_ptr<IWorkload>();
2141}
2142
Derek Lamberti901ea112019-12-10 22:07:09 +00002143std::unique_ptr<IWorkload> IWorkloadFactory::CreateMean(const MeanQueueDescriptor& /*descriptor*/,
2144 const WorkloadInfo& /*Info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002145{
2146 return std::unique_ptr<IWorkload>();
2147}
2148
Derek Lamberti901ea112019-12-10 22:07:09 +00002149std::unique_ptr<IWorkload> IWorkloadFactory::CreateMemCopy(const MemCopyQueueDescriptor& /*descriptor*/,
2150 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002151{
2152 return std::unique_ptr<IWorkload>();
2153}
2154
Derek Lamberti901ea112019-12-10 22:07:09 +00002155std::unique_ptr<IWorkload> IWorkloadFactory::CreateMemImport(const MemImportQueueDescriptor& /*descriptor*/,
2156 const WorkloadInfo& /*info*/) const
Derek Lambertif674aa02019-08-01 15:56:25 +01002157{
2158 return std::unique_ptr<IWorkload>();
2159}
2160
Derek Lamberti901ea112019-12-10 22:07:09 +00002161std::unique_ptr<IWorkload> IWorkloadFactory::CreateMerge(const MergeQueueDescriptor& /*descriptor*/,
2162 const WorkloadInfo& /*info*/) const
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01002163{
2164 return std::unique_ptr<IWorkload>();
2165}
2166
Derek Lamberti901ea112019-12-10 22:07:09 +00002167std::unique_ptr<IWorkload> IWorkloadFactory::CreateMinimum(const MinimumQueueDescriptor& /*descriptor*/,
2168 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002169{
2170 return std::unique_ptr<IWorkload>();
2171}
2172
Derek Lamberti901ea112019-12-10 22:07:09 +00002173std::unique_ptr<IWorkload> IWorkloadFactory::CreateMultiplication(const MultiplicationQueueDescriptor& /*descriptor*/,
2174 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002175{
2176 return std::unique_ptr<IWorkload>();
2177}
2178
Derek Lamberti901ea112019-12-10 22:07:09 +00002179std::unique_ptr<IWorkload> IWorkloadFactory::CreateNormalization(const NormalizationQueueDescriptor& /*descriptor*/,
2180 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002181{
2182 return std::unique_ptr<IWorkload>();
2183}
2184
Derek Lamberti901ea112019-12-10 22:07:09 +00002185std::unique_ptr<IWorkload> IWorkloadFactory::CreateOutput(const OutputQueueDescriptor& /*descriptor*/,
2186 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002187{
2188 return std::unique_ptr<IWorkload>();
2189}
2190
Derek Lamberti901ea112019-12-10 22:07:09 +00002191std::unique_ptr<IWorkload> IWorkloadFactory::CreatePad(const PadQueueDescriptor& /*descriptor*/,
2192 const WorkloadInfo& /*Info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002193{
2194 return std::unique_ptr<IWorkload>();
2195}
2196
Derek Lamberti901ea112019-12-10 22:07:09 +00002197std::unique_ptr<IWorkload> IWorkloadFactory::CreatePermute(const PermuteQueueDescriptor& /*descriptor*/,
Mike Kellyc9ea45a2020-02-28 18:11:58 +00002198 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002199{
2200 return std::unique_ptr<IWorkload>();
2201}
2202
Derek Lamberti901ea112019-12-10 22:07:09 +00002203std::unique_ptr<IWorkload> IWorkloadFactory::CreatePooling2d(const Pooling2dQueueDescriptor& /*descriptor*/,
2204 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002205{
2206 return std::unique_ptr<IWorkload>();
2207}
2208
Tamás Nyíri7b885b32021-10-26 14:47:57 +01002209std::unique_ptr<IWorkload> IWorkloadFactory::CreatePooling3d(const Pooling3dQueueDescriptor& /*descriptor*/,
2210 const WorkloadInfo& /*info*/) const
2211{
2212 return std::unique_ptr<IWorkload>();
2213}
2214
Derek Lamberti901ea112019-12-10 22:07:09 +00002215std::unique_ptr<IWorkload> IWorkloadFactory::CreatePreCompiled(const PreCompiledQueueDescriptor& /*descriptor*/,
2216 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002217{
2218 return std::unique_ptr<IWorkload>();
2219}
2220
Derek Lamberti901ea112019-12-10 22:07:09 +00002221std::unique_ptr<IWorkload> IWorkloadFactory::CreatePrelu(const PreluQueueDescriptor &/*descriptor*/,
2222 const WorkloadInfo &/*info*/) const
Matteo Martincigh0e406ee2019-06-12 15:42:18 +01002223{
2224 return std::unique_ptr<IWorkload>();
2225}
2226
Derek Lamberti901ea112019-12-10 22:07:09 +00002227std::unique_ptr<IWorkload> IWorkloadFactory::CreateQuantize(const QuantizeQueueDescriptor& /*descriptor*/,
2228 const WorkloadInfo& /*Info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002229{
2230 return std::unique_ptr<IWorkload>();
2231}
2232
James Conroy586a9aa2020-03-20 08:49:33 +00002233std::unique_ptr<IWorkload> IWorkloadFactory::CreateQLstm(const QLstmQueueDescriptor& /*descriptor*/,
2234 const WorkloadInfo& /*info*/) const
2235{
2236 return std::unique_ptr<IWorkload>();
2237}
2238
Derek Lamberti901ea112019-12-10 22:07:09 +00002239std::unique_ptr<IWorkload> IWorkloadFactory::CreateQuantizedLstm(const QuantizedLstmQueueDescriptor& /*descriptor*/,
2240 const WorkloadInfo& /*info*/) const
James Conroyee18dc82019-07-17 11:27:46 +01002241{
2242 return std::unique_ptr<IWorkload>();
2243}
Finn Williams2605b232020-06-10 15:53:46 +01002244std::unique_ptr<IWorkload> IWorkloadFactory::CreateRank(const RankQueueDescriptor& /*descriptor*/,
2245 const WorkloadInfo& /*info*/) const
2246{
2247 return std::unique_ptr<IWorkload>();
2248}
James Conroyee18dc82019-07-17 11:27:46 +01002249
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +00002250std::unique_ptr<IWorkload> IWorkloadFactory::CreateReduce(const ReduceQueueDescriptor& /*descriptor*/,
2251 const WorkloadInfo& /*info*/) const
2252{
2253 return std::unique_ptr<IWorkload>();
2254}
2255
Derek Lamberti901ea112019-12-10 22:07:09 +00002256std::unique_ptr<IWorkload> IWorkloadFactory::CreateReshape(const ReshapeQueueDescriptor& /*descriptor*/,
2257 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002258{
2259 return std::unique_ptr<IWorkload>();
2260}
2261
Derek Lamberti901ea112019-12-10 22:07:09 +00002262std::unique_ptr<IWorkload> IWorkloadFactory::CreateResize(const ResizeQueueDescriptor& /*descriptor*/,
2263 const WorkloadInfo& /*info*/) const
Teresa Charlina9075df2019-06-27 15:41:57 +01002264{
2265 return std::unique_ptr<IWorkload>();
2266}
2267
Keith Davis3ae3f972021-05-21 16:33:48 +01002268std::unique_ptr<IWorkload> IWorkloadFactory::CreateShape(const ShapeQueueDescriptor& /*descriptor*/,
2269 const WorkloadInfo& /*info*/) const
2270{
2271 return std::unique_ptr<IWorkload>();
2272}
2273
Derek Lamberti901ea112019-12-10 22:07:09 +00002274std::unique_ptr<IWorkload> IWorkloadFactory::CreateSlice(const SliceQueueDescriptor& /*descriptor*/,
2275 const WorkloadInfo& /*info*/) const
2276{
2277 return std::unique_ptr<IWorkload>();
2278}
Mike Kellyc9ea45a2020-02-28 18:11:58 +00002279
Derek Lamberti901ea112019-12-10 22:07:09 +00002280std::unique_ptr<IWorkload> IWorkloadFactory::CreateSoftmax(const SoftmaxQueueDescriptor& /*descriptor*/,
2281 const WorkloadInfo& /*info*/) const
Aron Virginas-Tar636ab402019-09-16 14:27:45 +01002282{
2283 return std::unique_ptr<IWorkload>();
2284}
2285
Derek Lamberti901ea112019-12-10 22:07:09 +00002286std::unique_ptr<IWorkload> IWorkloadFactory::CreateSplitter(const SplitterQueueDescriptor& /*descriptor*/,
2287 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002288{
2289 return std::unique_ptr<IWorkload>();
2290}
2291
Derek Lamberti901ea112019-12-10 22:07:09 +00002292std::unique_ptr<IWorkload> IWorkloadFactory::CreateSpaceToBatchNd(const SpaceToBatchNdQueueDescriptor& /*descriptor*/,
2293 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002294{
2295 return std::unique_ptr<IWorkload>();
2296}
2297
Derek Lamberti901ea112019-12-10 22:07:09 +00002298std::unique_ptr<IWorkload> IWorkloadFactory::CreateSpaceToDepth(const SpaceToDepthQueueDescriptor& /*descriptor*/,
2299 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002300{
2301 return std::unique_ptr<IWorkload>();
2302}
2303
Derek Lamberti901ea112019-12-10 22:07:09 +00002304std::unique_ptr<IWorkload> IWorkloadFactory::CreateStack(const StackQueueDescriptor& /*descriptor*/,
2305 const WorkloadInfo& /*info*/) const
Aron Virginas-Tar972af152019-06-11 14:14:03 +01002306{
2307 return std::unique_ptr<IWorkload>();
2308}
2309
Derek Lamberti901ea112019-12-10 22:07:09 +00002310std::unique_ptr<IWorkload> IWorkloadFactory::CreateStridedSlice(const StridedSliceQueueDescriptor& /*descriptor*/,
2311 const WorkloadInfo& /*info*/) const
Matthew Jackson2b8c1da2019-07-04 14:59:16 +01002312{
2313 return std::unique_ptr<IWorkload>();
2314}
2315
Derek Lamberti901ea112019-12-10 22:07:09 +00002316std::unique_ptr<IWorkload> IWorkloadFactory::CreateSubtraction(const SubtractionQueueDescriptor& /*descriptor*/,
2317 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00002318{
2319 return std::unique_ptr<IWorkload>();
2320}
2321
Derek Lamberti901ea112019-12-10 22:07:09 +00002322std::unique_ptr<IWorkload> IWorkloadFactory::CreateSwitch(const SwitchQueueDescriptor& /*descriptor*/,
2323 const WorkloadInfo& /*info*/) const
Sadik Armaganeff363d2019-04-05 15:25:46 +01002324{
2325 return std::unique_ptr<IWorkload>();
2326}
2327
Mike Kellyc9ea45a2020-02-28 18:11:58 +00002328std::unique_ptr<IWorkload> IWorkloadFactory::CreateTranspose(const TransposeQueueDescriptor& /*descriptor*/,
2329 const WorkloadInfo& /*info*/) const
2330{
2331 return std::unique_ptr<IWorkload>();
2332}
2333
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01002334std::unique_ptr<IWorkload> IWorkloadFactory::CreateTransposeConvolution2d(
Derek Lamberti901ea112019-12-10 22:07:09 +00002335 const TransposeConvolution2dQueueDescriptor& /*descriptor*/,
2336 const WorkloadInfo& /*info*/) const
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01002337{
2338 return std::unique_ptr<IWorkload>();
surmeh013537c2c2018-05-18 16:31:43 +01002339}
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01002340
Narumol Prangnawarat8ed39ae2021-07-15 16:16:25 +01002341std::unique_ptr<IWorkload> IWorkloadFactory::CreateUnidirectionalSequenceLstm(
2342 const UnidirectionalSequenceLstmQueueDescriptor& /*descriptor*/,
2343 const WorkloadInfo& /*info*/) const
2344{
2345 return std::unique_ptr<IWorkload>();
2346}
2347
Francis Murtagh9270d9e2022-08-12 13:54:17 +01002348std::unique_ptr<IWorkload> IWorkloadFactory::CreateInput(
2349 const InputQueueDescriptor& /*descriptor*/,
2350 const WorkloadInfo& /*info*/) const
2351{
2352 return std::unique_ptr<IWorkload>();
2353}
2354
} // namespace armnn