blob: 3a8a2ae18f7d571d3cd75a67391314bb4940209b [file] [log] [blame]
Laurent Carlier749294b2020-06-01 09:03:17 +01001//
Teresa Charlin52664732020-06-29 16:27:03 +01002// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
David Beckecb56cd2018-09-05 12:52:57 +01003// SPDX-License-Identifier: MIT
telsoa014fcda012018-03-09 14:13:49 +00004//
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +00005
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +00006#include <Layer.hpp>
7#include <LayersFwd.hpp>
David Beckdcb751f2018-10-03 11:42:42 +01008
David Beckb4540be2018-09-24 13:18:27 +01009#include <armnn/Types.hpp>
10#include <armnn/LayerSupport.hpp>
David Beck111b5d92018-11-12 14:59:37 +000011#include <armnn/ILayerSupport.hpp>
Matteo Martincighc601aa62019-10-29 15:03:22 +000012#include <armnn/BackendRegistry.hpp>
Jan Eilersbb446e52020-04-02 13:56:54 +010013#include <armnn/utility/PolymorphicDowncast.hpp>
Finn Williams3e54d032020-10-22 16:53:35 +010014#include <armnn/utility/TransformIterator.hpp>
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +000015
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +000016#include <backendsCommon/WorkloadFactory.hpp>
Matteo Martincighe5b8eb92019-11-28 15:45:42 +000017#include <backendsCommon/CpuTensorHandle.hpp>
Matteo Martincighe5b8eb92019-11-28 15:45:42 +000018
Francis Murtagh46c09d02019-05-28 08:15:28 +010019#include <backendsCommon/test/WorkloadTestUtils.hpp>
telsoa014fcda012018-03-09 14:13:49 +000020
David Beck111b5d92018-11-12 14:59:37 +000021#include <sstream>
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +000022
telsoa014fcda012018-03-09 14:13:49 +000023namespace armnn
24{
25
telsoa01c577f2c2018-08-31 09:22:23 +010026namespace
27{
Finn Williams3e54d032020-10-22 16:53:35 +010028using LayerList = std::list<Layer*>;
29using Iterator = LayerList::const_iterator; // Const so pointers in the list can't be modified externally.
telsoa01c577f2c2018-08-31 09:22:23 +010030
David Beck29c75de2018-10-23 13:35:58 +010031const TensorInfo OverrideDataType(const TensorInfo& info, Optional<DataType> type)
32{
33 if (!type)
34 {
35 return info;
telsoa01c577f2c2018-08-31 09:22:23 +010036 }
37
David Beck29c75de2018-10-23 13:35:58 +010038 return TensorInfo(info.GetShape(), type.value(), info.GetQuantizationScale(), info.GetQuantizationOffset());
telsoa01c577f2c2018-08-31 09:22:23 +010039}
40
David Beck29c75de2018-10-23 13:35:58 +010041} // anonymous namespace
42
Sadik Armagan045f6be2020-09-10 13:37:32 +010043bool IWorkloadFactory::IsLayerConfigurationSupported(const BackendId& backendId,
44 const IConnectableLayer& connectableLayer,
45 Optional<DataType> dataType,
46 std::string& outReasonIfUnsupported,
47 const ModelOptions& modelOptions)
telsoa014fcda012018-03-09 14:13:49 +000048{
David Beck33f0ae02018-10-18 15:13:56 +010049 Optional<std::string&> reason = outReasonIfUnsupported;
telsoa014fcda012018-03-09 14:13:49 +000050 bool result;
Jan Eilersbb446e52020-04-02 13:56:54 +010051 const Layer& layer = *(PolymorphicDowncast<const Layer*>(&connectableLayer));
David Beckdcb751f2018-10-03 11:42:42 +010052
David Beck111b5d92018-11-12 14:59:37 +000053 auto const& backendRegistry = BackendRegistryInstance();
54 if (!backendRegistry.IsBackendRegistered(backendId))
55 {
56 std::stringstream ss;
57 ss << connectableLayer.GetName() << " is not supported on " << backendId
58 << " because this backend is not registered.";
59
60 outReasonIfUnsupported = ss.str();
61 return false;
62 }
63
64 auto backendFactory = backendRegistry.GetFactory(backendId);
65 auto backendObject = backendFactory();
Sadik Armagan045f6be2020-09-10 13:37:32 +010066 auto layerSupportObject = backendObject->GetLayerSupport(modelOptions);
David Beck33f0ae02018-10-18 15:13:56 +010067
telsoa014fcda012018-03-09 14:13:49 +000068 switch(layer.GetType())
69 {
70 case LayerType::Activation:
71 {
Jan Eilersbb446e52020-04-02 13:56:54 +010072 auto cLayer = PolymorphicDowncast<const ActivationLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +000073 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +010074 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +010075 result = layerSupportObject->IsActivationSupported(
telsoa01c577f2c2018-08-31 09:22:23 +010076 OverrideDataType(input, dataType),
77 OverrideDataType(output, dataType),
78 cLayer->GetParameters(),
David Beck33f0ae02018-10-18 15:13:56 +010079 reason);
telsoa014fcda012018-03-09 14:13:49 +000080 break;
81 }
82 case LayerType::Addition:
83 {
84 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
85 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
86 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +010087 result = layerSupportObject->IsAdditionSupported(
telsoa01c577f2c2018-08-31 09:22:23 +010088 OverrideDataType(input0, dataType),
89 OverrideDataType(input1, dataType),
90 OverrideDataType(output, dataType),
David Beck33f0ae02018-10-18 15:13:56 +010091 reason);
telsoa014fcda012018-03-09 14:13:49 +000092 break;
93 }
Nikhil Rajee391d52019-09-05 17:50:44 +010094 case LayerType::ArgMinMax:
95 {
Jan Eilersbb446e52020-04-02 13:56:54 +010096 auto cLayer = PolymorphicDowncast<const ArgMinMaxLayer*>(&layer);
Nikhil Rajee391d52019-09-05 17:50:44 +010097 const ArgMinMaxDescriptor& descriptor = cLayer->GetParameters();
98
99 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
100 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
101 result = layerSupportObject->IsArgMinMaxSupported(
102 OverrideDataType(input, dataType),
Narumol Prangnawaratd1f57732019-10-31 14:24:02 +0000103 OverrideDataType(output, DataType::Signed32),
Nikhil Rajee391d52019-09-05 17:50:44 +0100104 descriptor,
105 reason);
106 break;
107 }
telsoa014fcda012018-03-09 14:13:49 +0000108 case LayerType::BatchNormalization:
109 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100110 auto cLayer = PolymorphicDowncast<const BatchNormalizationLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000111 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100112 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
113 const TensorInfo& mean = cLayer->m_Mean->GetTensorInfo();
114 const TensorInfo& var = cLayer->m_Variance->GetTensorInfo();
115 const TensorInfo& beta = cLayer->m_Beta->GetTensorInfo();
116 const TensorInfo& gamma = cLayer->m_Gamma->GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100117 result = layerSupportObject->IsBatchNormalizationSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100118 OverrideDataType(input, dataType),
119 OverrideDataType(output, dataType),
120 OverrideDataType(mean, dataType),
121 OverrideDataType(var, dataType),
122 OverrideDataType(beta, dataType),
123 OverrideDataType(gamma, dataType),
124 cLayer->GetParameters(),
David Beck33f0ae02018-10-18 15:13:56 +0100125 reason);
telsoa014fcda012018-03-09 14:13:49 +0000126 break;
127 }
Éanna Ó Catháin4e1e1362018-11-12 11:36:34 +0000128 case LayerType::BatchToSpaceNd:
129 {
130 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
131 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Jan Eilersbb446e52020-04-02 13:56:54 +0100132 auto cLayer = PolymorphicDowncast<const BatchToSpaceNdLayer*>(&layer);
Éanna Ó Catháin4e1e1362018-11-12 11:36:34 +0000133
134 result = layerSupportObject->IsBatchToSpaceNdSupported(OverrideDataType(input, dataType),
135 OverrideDataType(output, dataType),
136 cLayer->GetParameters(),
137 reason);
138 break;
139 }
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +0100140 case LayerType::Comparison:
141 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100142 auto cLayer = PolymorphicDowncast<const ComparisonLayer*>(&layer);
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +0100143
144 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
145 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
146 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
147
148 result = layerSupportObject->IsComparisonSupported(OverrideDataType(input0, dataType),
149 OverrideDataType(input1, dataType),
150 OverrideDataType(output, DataType::Boolean),
151 cLayer->GetParameters(),
152 reason);
153 break;
154 }
telsoa014fcda012018-03-09 14:13:49 +0000155 case LayerType::Constant:
156 {
157 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100158 result = layerSupportObject->IsConstantSupported(OverrideDataType(output, dataType), reason);
telsoa01c577f2c2018-08-31 09:22:23 +0100159 break;
160 }
Narumol Prangnawarat7ddbbae2020-03-13 10:26:05 +0000161 case LayerType::ConvertBf16ToFp32:
162 {
163 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
164 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
165 result = layerSupportObject->IsConvertBf16ToFp32Supported(input, output, reason);
166 break;
167 }
telsoa01c577f2c2018-08-31 09:22:23 +0100168 case LayerType::ConvertFp16ToFp32:
169 {
170 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
171 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100172 result = layerSupportObject->IsConvertFp16ToFp32Supported(input, output, reason);
telsoa01c577f2c2018-08-31 09:22:23 +0100173 break;
174 }
Narumol Prangnawaratea54a012020-03-16 16:36:10 +0000175 case LayerType::ConvertFp32ToBf16:
176 {
177 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
178 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
179 result = layerSupportObject->IsConvertFp32ToBf16Supported(input, output, reason);
180 break;
181 }
telsoa01c577f2c2018-08-31 09:22:23 +0100182 case LayerType::ConvertFp32ToFp16:
183 {
184 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
185 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100186 result = layerSupportObject->IsConvertFp32ToFp16Supported(input, output, reason);
telsoa014fcda012018-03-09 14:13:49 +0000187 break;
188 }
189 case LayerType::Convolution2d:
190 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100191 auto cLayer = PolymorphicDowncast<const Convolution2dLayer*>(&layer);
arovir01a6824102018-08-28 17:40:45 +0100192
193 const TensorInfo input = OverrideDataType(layer.GetInputSlot(0).GetConnection()->GetTensorInfo(),
194 dataType);
telsoa01c577f2c2018-08-31 09:22:23 +0100195 const TensorInfo output = OverrideDataType(layer.GetOutputSlot(0).GetTensorInfo(), dataType);
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +0100196 ARMNN_ASSERT(cLayer->m_Weight.get() != nullptr);
surmeh013537c2c2018-05-18 16:31:43 +0100197
arovir01a6824102018-08-28 17:40:45 +0100198 const Convolution2dDescriptor& descriptor = cLayer->GetParameters();
surmeh013537c2c2018-05-18 16:31:43 +0100199
arovir01a6824102018-08-28 17:40:45 +0100200 // Construct optional biases object based on the value of m_BiasEnabled
David Beck5eec11d2018-10-04 15:43:17 +0100201 Optional<TensorInfo> biases;
surmeh013537c2c2018-05-18 16:31:43 +0100202 if (descriptor.m_BiasEnabled)
203 {
David Beck5eec11d2018-10-04 15:43:17 +0100204 biases =
205 OverrideDataType(cLayer->m_Bias->GetTensorInfo(), GetBiasTypeFromWeightsType(dataType));
surmeh013537c2c2018-05-18 16:31:43 +0100206 }
207
David Beck33f0ae02018-10-18 15:13:56 +0100208 result = layerSupportObject->IsConvolution2dSupported(
surmeh013537c2c2018-05-18 16:31:43 +0100209 input,
210 output,
211 descriptor,
telsoa01c577f2c2018-08-31 09:22:23 +0100212 OverrideDataType(cLayer->m_Weight->GetTensorInfo(), dataType),
arovir01a6824102018-08-28 17:40:45 +0100213 biases,
David Beck33f0ae02018-10-18 15:13:56 +0100214 reason);
telsoa014fcda012018-03-09 14:13:49 +0000215 break;
216 }
Nattapat Chaimanowonga9a1cf12018-12-03 16:06:49 +0000217 case LayerType::Debug:
218 {
219 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
220 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
221
222 result = layerSupportObject->IsDebugSupported(OverrideDataType(input, dataType),
223 OverrideDataType(output, dataType),
224 reason);
225 break;
226 }
Aron Virginas-Tardd6247f2019-09-19 14:31:17 +0100227 case LayerType::DepthToSpace:
228 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100229 auto cLayer = PolymorphicDowncast<const DepthToSpaceLayer*>(&layer);
Aron Virginas-Tardd6247f2019-09-19 14:31:17 +0100230
231 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
232 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
233
234 result = layerSupportObject->IsDepthToSpaceSupported(OverrideDataType(input, dataType),
235 OverrideDataType(output, dataType),
236 cLayer->GetParameters(),
237 reason);
238 break;
239 }
telsoa014fcda012018-03-09 14:13:49 +0000240 case LayerType::DepthwiseConvolution2d:
241 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100242 auto cLayer = PolymorphicDowncast<const DepthwiseConvolution2dLayer*>(&layer);
telsoa01c577f2c2018-08-31 09:22:23 +0100243 const TensorInfo& input = OverrideDataType(layer.GetInputSlot(0).GetConnection()->GetTensorInfo(),
244 dataType);
245 const TensorInfo& output = OverrideDataType(layer.GetOutputSlot(0).GetTensorInfo(), dataType);
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +0100246 ARMNN_ASSERT(cLayer->m_Weight.get() != nullptr);
telsoa01c577f2c2018-08-31 09:22:23 +0100247
telsoa01c577f2c2018-08-31 09:22:23 +0100248 const DepthwiseConvolution2dDescriptor& descriptor = cLayer->GetParameters();
arovir01a6824102018-08-28 17:40:45 +0100249
250 // Construct optional biases object based on the value of m_BiasEnabled
David Beck5eec11d2018-10-04 15:43:17 +0100251 Optional<TensorInfo> biases;
telsoa01c577f2c2018-08-31 09:22:23 +0100252 if (descriptor.m_BiasEnabled)
253 {
David Beck5eec11d2018-10-04 15:43:17 +0100254 biases =
255 OverrideDataType(cLayer->m_Bias->GetTensorInfo(), GetBiasTypeFromWeightsType(dataType));
telsoa01c577f2c2018-08-31 09:22:23 +0100256 }
telsoa01c577f2c2018-08-31 09:22:23 +0100257
David Beck33f0ae02018-10-18 15:13:56 +0100258 result = layerSupportObject->IsDepthwiseConvolutionSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100259 input,
260 output,
261 descriptor,
262 OverrideDataType(cLayer->m_Weight->GetTensorInfo(), dataType),
arovir01a6824102018-08-28 17:40:45 +0100263 biases,
David Beck33f0ae02018-10-18 15:13:56 +0100264 reason);
telsoa014fcda012018-03-09 14:13:49 +0000265 break;
266 }
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +0000267 case LayerType::Dequantize:
268 {
269 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
270 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
271
Aron Virginas-Tar87972be2019-11-13 15:16:28 +0000272 result = layerSupportObject->IsDequantizeSupported(input,
273 OverrideDataType(output, dataType),
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +0000274 reason);
275 break;
276 }
Narumol Prangnawarat94dd5d82019-01-23 18:06:26 +0000277 case LayerType::DetectionPostProcess:
278 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100279 auto cLayer = PolymorphicDowncast<const DetectionPostProcessLayer*>(&layer);
Derek Lamberti6a5e5e82019-12-05 14:41:20 +0000280 const TensorInfo& boxEncodings = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
281 const TensorInfo& scores = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
282 const TensorInfo& anchors = cLayer->m_Anchors->GetTensorInfo();
283
284 const TensorInfo& detectionBoxes = layer.GetOutputSlot(0).GetTensorInfo();
285 const TensorInfo& detectionClasses = layer.GetOutputSlot(1).GetTensorInfo();
286 const TensorInfo& detectionScores = layer.GetOutputSlot(2).GetTensorInfo();
287 const TensorInfo& numDetections = layer.GetOutputSlot(3).GetTensorInfo();
288
Narumol Prangnawarat94dd5d82019-01-23 18:06:26 +0000289 const DetectionPostProcessDescriptor& descriptor = cLayer->GetParameters();
Derek Lamberti6a5e5e82019-12-05 14:41:20 +0000290 result = layerSupportObject->IsDetectionPostProcessSupported(boxEncodings,
291 scores,
292 anchors,
293 detectionBoxes,
294 detectionClasses,
295 detectionScores,
296 numDetections,
Narumol Prangnawarat94dd5d82019-01-23 18:06:26 +0000297 descriptor,
298 reason);
299 break;
300 }
josh minor4a3c6102020-01-06 16:40:46 -0600301 case LayerType::ElementwiseUnary:
302 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100303 auto cLayer = PolymorphicDowncast<const ElementwiseUnaryLayer*>(&layer);
josh minor4a3c6102020-01-06 16:40:46 -0600304
305 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
306 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
307
308 result = layerSupportObject->IsElementwiseUnarySupported(OverrideDataType(input, dataType),
309 OverrideDataType(output, dataType),
310 cLayer->GetParameters(),
311 reason);
312 break;
313 }
Ryan OSheaec6c6802020-06-05 17:17:06 +0100314 case LayerType::Fill:
315 {
316 auto cLayer = PolymorphicDowncast<const FillLayer*>(&layer);
317 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
318 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
319 const FillDescriptor& descriptor = cLayer->GetParameters();
320
321 result = layerSupportObject->IsFillSupported(
322 OverrideDataType(input, dataType),
323 OverrideDataType(output, dataType),
324 descriptor,
325 reason);
326 break;
327 }
telsoa014fcda012018-03-09 14:13:49 +0000328 case LayerType::FakeQuantization:
329 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100330 auto cLayer = PolymorphicDowncast<const FakeQuantizationLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000331 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100332 result = layerSupportObject->IsFakeQuantizationSupported(OverrideDataType(input, dataType),
333 cLayer->GetParameters(),
334 reason);
telsoa014fcda012018-03-09 14:13:49 +0000335 break;
336 }
337 case LayerType::Floor:
338 {
339 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
340 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100341 result = layerSupportObject->IsFloorSupported(OverrideDataType(input, dataType),
342 OverrideDataType(output, dataType),
343 reason);
telsoa014fcda012018-03-09 14:13:49 +0000344 break;
345 }
346 case LayerType::FullyConnected:
347 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100348 auto cLayer = PolymorphicDowncast<const FullyConnectedLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000349 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100350 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +0100351 ARMNN_ASSERT(cLayer->m_Weight.get() != nullptr);
telsoa01c577f2c2018-08-31 09:22:23 +0100352
353 TensorInfo biasInfo;
354 const TensorInfo * biasInfoPtr = nullptr;
Narumol Prangnawarat44179c32020-03-11 14:51:27 +0000355 static const TensorInfo dummyBFloat16Bias(TensorShape({1,1,1,1}), DataType::BFloat16);
telsoa01c577f2c2018-08-31 09:22:23 +0100356 static const TensorInfo dummyFloat16Bias(TensorShape({1,1,1,1}), DataType::Float16);
357 static const TensorInfo dummyFloat32Bias(TensorShape({1,1,1,1}), DataType::Float32);
358 static const TensorInfo dummyQA8Bias(TensorShape({1,1,1,1}), DataType::Signed32);
359
360 const FullyConnectedDescriptor& descriptor = cLayer->GetParameters();
361 if (descriptor.m_BiasEnabled)
362 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +0100363 ARMNN_ASSERT(cLayer->m_Bias.get() != nullptr);
telsoa01c577f2c2018-08-31 09:22:23 +0100364 biasInfo = OverrideDataType(cLayer->m_Bias->GetTensorInfo(), GetBiasTypeFromWeightsType(dataType));
365 biasInfoPtr = &biasInfo;
366 }
367 else
368 {
369 // If biases are not enabled pass a dummy tensorinfo for the validation
370 switch(input.GetDataType())
371 {
Narumol Prangnawarat44179c32020-03-11 14:51:27 +0000372 case DataType::BFloat16:
373 {
374 biasInfoPtr = &dummyBFloat16Bias;
375 break;
376 }
telsoa01c577f2c2018-08-31 09:22:23 +0100377 case DataType::Float16:
378 {
379 biasInfoPtr = &dummyFloat16Bias;
380 break;
381 }
382 case DataType::Float32:
383 {
384 biasInfoPtr = &dummyFloat32Bias;
385 break;
386 }
Derek Lambertif90c56d2020-01-10 17:14:08 +0000387 case DataType::QAsymmU8:
Keith Davisa8565012020-02-14 12:22:40 +0000388 case DataType::QAsymmS8:
Keith Davis9d0ff742020-02-03 14:47:54 +0000389 case DataType::QSymmS8:
Derek Lambertif90c56d2020-01-10 17:14:08 +0000390 case DataType::QSymmS16:
telsoa01c577f2c2018-08-31 09:22:23 +0100391 {
392 biasInfoPtr = &dummyQA8Bias;
393 break;
394 }
395 default:
396 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +0100397 ARMNN_ASSERT_MSG(false, "Unexpected bias type");
telsoa01c577f2c2018-08-31 09:22:23 +0100398 }
399 }
400 }
401
David Beck33f0ae02018-10-18 15:13:56 +0100402 result = layerSupportObject->IsFullyConnectedSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100403 OverrideDataType(input, dataType),
404 OverrideDataType(output, dataType),
405 OverrideDataType(cLayer->m_Weight->GetTensorInfo(), dataType),
406 *biasInfoPtr,
407 descriptor,
David Beck33f0ae02018-10-18 15:13:56 +0100408 reason);
telsoa014fcda012018-03-09 14:13:49 +0000409 break;
410 }
narpra01b89b05f2019-01-16 09:53:09 +0000411 case LayerType::Gather:
412 {
413 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
414 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
415 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Teresa Charlin52664732020-06-29 16:27:03 +0100416 auto cLayer = PolymorphicDowncast<const GatherLayer*>(&layer);
417 const GatherDescriptor& descriptor = cLayer->GetParameters();
narpra01b89b05f2019-01-16 09:53:09 +0000418 result = layerSupportObject->IsGatherSupported(OverrideDataType(input0, dataType),
Ellen Norris-Thompsone0dbedf2019-06-24 09:23:38 +0100419 input1,
narpra01b89b05f2019-01-16 09:53:09 +0000420 OverrideDataType(output, dataType),
Teresa Charlin52664732020-06-29 16:27:03 +0100421 descriptor,
narpra01b89b05f2019-01-16 09:53:09 +0000422 reason);
423 break;
424 }
telsoa014fcda012018-03-09 14:13:49 +0000425 case LayerType::Input:
426 {
427 const TensorInfo& input = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100428 result = layerSupportObject->IsInputSupported(OverrideDataType(input, dataType), reason);
telsoa014fcda012018-03-09 14:13:49 +0000429 break;
430 }
Kevin Mayce5045a2019-10-02 14:07:47 +0100431 case LayerType::InstanceNormalization:
432 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100433 auto cLayer = PolymorphicDowncast<const InstanceNormalizationLayer*>(&layer);
Kevin Mayce5045a2019-10-02 14:07:47 +0100434 const InstanceNormalizationDescriptor& descriptor = cLayer->GetParameters();
435
436 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
437 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
438
439 result = layerSupportObject->IsInstanceNormalizationSupported(
440 OverrideDataType(input, dataType),
441 OverrideDataType(output, dataType),
442 descriptor,
443 reason);
444 break;
445 }
telsoa014fcda012018-03-09 14:13:49 +0000446 case LayerType::L2Normalization:
447 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100448 auto cLayer = PolymorphicDowncast<const L2NormalizationLayer*>(&layer);
Matteo Martincighbcd3c852018-09-28 14:14:12 +0100449 const L2NormalizationDescriptor& descriptor = cLayer->GetParameters();
450
telsoa014fcda012018-03-09 14:13:49 +0000451 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100452 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Matteo Martincighbcd3c852018-09-28 14:14:12 +0100453
David Beck33f0ae02018-10-18 15:13:56 +0100454 result = layerSupportObject->IsL2NormalizationSupported(
Matteo Martincighbcd3c852018-09-28 14:14:12 +0100455 OverrideDataType(input, dataType),
456 OverrideDataType(output, dataType),
457 descriptor,
David Beck33f0ae02018-10-18 15:13:56 +0100458 reason);
telsoa01c577f2c2018-08-31 09:22:23 +0100459 break;
460 }
James Conroyaba90cd2020-11-06 16:28:18 +0000461 case LayerType::LogicalBinary:
462 {
463 auto cLayer = PolymorphicDowncast<const LogicalBinaryLayer*>(&layer);
464
465 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
466 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
467 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
468
469 result = layerSupportObject->IsLogicalBinarySupported(input0,
470 input1,
471 output,
472 cLayer->GetParameters(),
473 reason);
474 break;
475 }
Aron Virginas-Tarf982dea2019-10-11 14:07:53 +0100476 case LayerType::LogSoftmax:
477 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100478 auto cLayer = PolymorphicDowncast<const LogSoftmaxLayer*>(&layer);
Aron Virginas-Tarf982dea2019-10-11 14:07:53 +0100479
480 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
481 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
482
483 result = layerSupportObject->IsLogSoftmaxSupported(OverrideDataType(input, dataType),
484 OverrideDataType(output, dataType),
485 cLayer->GetParameters(),
486 reason);
487 break;
488 }
telsoa01c577f2c2018-08-31 09:22:23 +0100489 case LayerType::Lstm:
490 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100491 auto cLayer = PolymorphicDowncast<const LstmLayer*>(&layer);
telsoa01c577f2c2018-08-31 09:22:23 +0100492 const LstmDescriptor& descriptor = cLayer->GetParameters();
493
494 // All inputs.
495 const TensorInfo& input = OverrideDataType(layer.GetInputSlot(0).GetConnection()->GetTensorInfo(),
496 dataType);
497 const TensorInfo& outputStateIn = OverrideDataType(layer.GetInputSlot(1).GetConnection()->GetTensorInfo(),
498 dataType);
499 const TensorInfo& cellStateIn = OverrideDataType(layer.GetInputSlot(2).GetConnection()->GetTensorInfo(),
500 dataType);
501 // All outputs
502 const TensorInfo& scratchBuffer = OverrideDataType(layer.GetOutputSlot(0).GetTensorInfo(), dataType);
503 const TensorInfo& outputStateOut = OverrideDataType(layer.GetOutputSlot(1).GetTensorInfo(), dataType);
504 const TensorInfo& cellStateOut = OverrideDataType(layer.GetOutputSlot(2).GetTensorInfo(), dataType);
505 const TensorInfo& output = OverrideDataType(layer.GetOutputSlot(3).GetTensorInfo(), dataType);
506
507 // Basic parameters
508 const TensorInfo& inputToForgetWeights
509 = OverrideDataType(cLayer->m_BasicParameters.m_InputToForgetWeights->GetTensorInfo(), dataType);
510 const TensorInfo& inputToCellWeights
511 = OverrideDataType(cLayer->m_BasicParameters.m_InputToCellWeights->GetTensorInfo(), dataType);
512 const TensorInfo& inputToOutputWeights
513 = OverrideDataType(cLayer->m_BasicParameters.m_InputToOutputWeights->GetTensorInfo(), dataType);
514 const TensorInfo& recurrentToForgetWeights
515 = OverrideDataType(cLayer->m_BasicParameters.m_RecurrentToForgetWeights->GetTensorInfo(), dataType);
516 const TensorInfo& recurrentToCellWeights
517 = OverrideDataType(cLayer->m_BasicParameters.m_RecurrentToCellWeights->GetTensorInfo(), dataType);
518 const TensorInfo& recurrentToOutputWeights
519 = OverrideDataType(cLayer->m_BasicParameters.m_RecurrentToOutputWeights->GetTensorInfo(), dataType);
520 const TensorInfo& forgetGateBias
521 = OverrideDataType(cLayer->m_BasicParameters.m_ForgetGateBias->GetTensorInfo(), dataType);
522 const TensorInfo& cellBias
523 = OverrideDataType(cLayer->m_BasicParameters.m_CellBias->GetTensorInfo(), dataType);
524 const TensorInfo& outputGateBias
525 = OverrideDataType(cLayer->m_BasicParameters.m_OutputGateBias->GetTensorInfo(), dataType);
526
Jan Eilersd01a83c2019-07-03 18:20:40 +0100527 LstmInputParamsInfo paramsInfo;
telsoa01c577f2c2018-08-31 09:22:23 +0100528
Jan Eilersd01a83c2019-07-03 18:20:40 +0100529 paramsInfo.m_InputToForgetWeights = &inputToForgetWeights;
530 paramsInfo.m_InputToCellWeights = &inputToCellWeights;
531 paramsInfo.m_InputToOutputWeights = &inputToOutputWeights;
532 paramsInfo.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
533 paramsInfo.m_RecurrentToCellWeights = &recurrentToCellWeights;
534 paramsInfo.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
535 paramsInfo.m_ForgetGateBias = &forgetGateBias;
536 paramsInfo.m_CellBias = &cellBias;
537 paramsInfo.m_OutputGateBias = &outputGateBias;
538
539
540 // Optional parameters
telsoa01c577f2c2018-08-31 09:22:23 +0100541 TensorInfo optInputToInputWeights;
542 TensorInfo optRecurrentToInputWeights;
543 TensorInfo optCellToInputWeights;
544 TensorInfo optInputGateBias;
545 TensorInfo optProjectionWeights;
546 TensorInfo optProjectionBias;
547 TensorInfo optCellToForgetWeights;
548 TensorInfo optCellToOutputWeights;
Jan Eilers38e05bd2019-06-26 13:10:09 +0100549 TensorInfo optInputLayerNormWeights;
550 TensorInfo optForgetLayerNormWeights;
551 TensorInfo optCellLayerNormWeights;
552 TensorInfo optOutputLayerNormWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100553
554 if(!descriptor.m_CifgEnabled)
555 {
556 optInputToInputWeights =
557 OverrideDataType(cLayer->m_CifgParameters.m_InputToInputWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100558 paramsInfo.m_InputToInputWeights = &optInputToInputWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100559
560 optRecurrentToInputWeights =
561 OverrideDataType(cLayer->m_CifgParameters.m_RecurrentToInputWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100562 paramsInfo.m_RecurrentToInputWeights = &optRecurrentToInputWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100563 optInputGateBias =
564 OverrideDataType(cLayer->m_CifgParameters.m_InputGateBias->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100565 paramsInfo.m_InputGateBias = &optInputGateBias;
telsoa01c577f2c2018-08-31 09:22:23 +0100566 }
567
568 if(descriptor.m_ProjectionEnabled)
569 {
570 optProjectionWeights =
571 OverrideDataType(cLayer->m_ProjectionParameters.m_ProjectionWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100572 paramsInfo.m_ProjectionWeights = &optProjectionWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100573 if (cLayer->m_ProjectionParameters.m_ProjectionBias != nullptr)
574 {
575 optProjectionBias =
576 OverrideDataType(cLayer->m_ProjectionParameters.m_ProjectionBias->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100577 paramsInfo.m_ProjectionBias = &optProjectionBias;
telsoa01c577f2c2018-08-31 09:22:23 +0100578 }
579 }
580
581 if(descriptor.m_PeepholeEnabled)
582 {
Jan Eilerse2062cd2020-03-30 15:07:45 +0100583 if(!descriptor.m_CifgEnabled)
584 {
585 optCellToInputWeights =
586 OverrideDataType(cLayer->m_PeepholeParameters.m_CellToInputWeights->GetTensorInfo(),
587 dataType);
588 paramsInfo.m_CellToInputWeights = &optCellToInputWeights;
589 }
telsoa01c577f2c2018-08-31 09:22:23 +0100590 optCellToForgetWeights =
591 OverrideDataType(cLayer->m_PeepholeParameters.m_CellToForgetWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100592 paramsInfo.m_CellToForgetWeights = &optCellToForgetWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100593 optCellToOutputWeights =
594 OverrideDataType(cLayer->m_PeepholeParameters.m_CellToOutputWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100595 paramsInfo.m_CellToOutputWeights = &optCellToOutputWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100596 }
597
Jan Eilers38e05bd2019-06-26 13:10:09 +0100598 if(descriptor.m_LayerNormEnabled)
599 {
Ferran Balaguere30c16e2019-07-24 17:03:45 +0100600 if (!descriptor.m_CifgEnabled)
601 {
602 optInputLayerNormWeights = OverrideDataType(
603 cLayer->m_LayerNormParameters.m_InputLayerNormWeights->GetTensorInfo(), dataType);
604 paramsInfo.m_InputLayerNormWeights = &optInputLayerNormWeights;
605 }
Jan Eilers38e05bd2019-06-26 13:10:09 +0100606
607 optForgetLayerNormWeights = OverrideDataType(
608 cLayer->m_LayerNormParameters.m_ForgetLayerNormWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100609 paramsInfo.m_ForgetLayerNormWeights = &optForgetLayerNormWeights;
Jan Eilers38e05bd2019-06-26 13:10:09 +0100610
611 optCellLayerNormWeights = OverrideDataType(
612 cLayer->m_LayerNormParameters.m_CellLayerNormWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100613 paramsInfo.m_CellLayerNormWeights = &optCellLayerNormWeights;
Jan Eilers38e05bd2019-06-26 13:10:09 +0100614
615 optOutputLayerNormWeights = OverrideDataType(
616 cLayer->m_LayerNormParameters.m_OutputLayerNormWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100617 paramsInfo.m_OutputLayerNormWeights = &optOutputLayerNormWeights;
Jan Eilers38e05bd2019-06-26 13:10:09 +0100618 }
619
David Beck33f0ae02018-10-18 15:13:56 +0100620 result = layerSupportObject->IsLstmSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100621 input,
622 outputStateIn,
623 cellStateIn,
624 scratchBuffer,
625 outputStateOut,
626 cellStateOut,
627 output,
628 descriptor,
Jan Eilersd01a83c2019-07-03 18:20:40 +0100629 paramsInfo,
630 reason);
telsoa014fcda012018-03-09 14:13:49 +0000631 break;
632 }
Nattapat Chaimanowong5a4304a2018-11-28 10:44:37 +0000633 case LayerType::Maximum:
634 {
635 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
636 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
637 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
638
639 result = layerSupportObject->IsMaximumSupported(OverrideDataType(input0, dataType),
640 OverrideDataType(input1, dataType),
641 OverrideDataType(output, dataType),
642 reason);
643 break;
644 }
narpra01b89b05f2019-01-16 09:53:09 +0000645 case LayerType::MemCopy:
646 {
647 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
648 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Matteo Martincigh992d6dc2019-01-10 17:34:20 +0000649
narpra01b89b05f2019-01-16 09:53:09 +0000650 result = layerSupportObject->IsMemCopySupported(OverrideDataType(input, dataType),
651 OverrideDataType(output, dataType),
652 reason);
653 break;
654 }
Derek Lambertif674aa02019-08-01 15:56:25 +0100655 case LayerType::MemImport:
656 {
657 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
658 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
659
660 result = layerSupportObject->IsMemImportSupported(OverrideDataType(input, dataType),
661 OverrideDataType(output, dataType),
662 reason);
663 break;
664 }
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +0100665 case LayerType::Merge:
666 {
667 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
668 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
669 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
670
671 result = layerSupportObject->IsMergeSupported(OverrideDataType(input0, dataType),
672 OverrideDataType(input1, dataType),
673 OverrideDataType(output, dataType),
674 reason);
675 break;
676 }
Jim Flynne242f2d2019-05-22 14:24:13 +0100677 case LayerType::Concat:
telsoa014fcda012018-03-09 14:13:49 +0000678 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100679 auto cLayer = PolymorphicDowncast<const ConcatLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000680
telsoa01c577f2c2018-08-31 09:22:23 +0100681 // Get vector of all inputs.
682 auto getTensorInfo = [&dataType](const InputSlot& slot)
telsoa014fcda012018-03-09 14:13:49 +0000683 {
telsoa01c577f2c2018-08-31 09:22:23 +0100684 return OverrideDataType(slot.GetConnectedOutputSlot()->GetTensorInfo(), dataType);
telsoa014fcda012018-03-09 14:13:49 +0000685 };
Finn Williams3e54d032020-10-22 16:53:35 +0100686
687 auto beginI = MakeTransformIterator(layer.GetInputSlots().begin(), getTensorInfo);
688 auto endI = MakeTransformIterator(layer.GetInputSlots().end(), getTensorInfo);
telsoa01c577f2c2018-08-31 09:22:23 +0100689 std::vector<TensorInfo> inputs(beginI, endI);
telsoa014fcda012018-03-09 14:13:49 +0000690
telsoa01c577f2c2018-08-31 09:22:23 +0100691 auto getTensorInfoPtr = [](const TensorInfo& info)
692 {
693 return &info;
694 };
Finn Williams3e54d032020-10-22 16:53:35 +0100695
696 auto beginPtr = MakeTransformIterator(inputs.begin(), getTensorInfoPtr);
697 auto endPtr = MakeTransformIterator(inputs.end(), getTensorInfoPtr);
telsoa01c577f2c2018-08-31 09:22:23 +0100698 std::vector<const TensorInfo*> inputPtrs(beginPtr, endPtr);
telsoa014fcda012018-03-09 14:13:49 +0000699
Nikhil Raj8599a412018-11-19 14:51:07 +0000700 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
701
Jim Flynne242f2d2019-05-22 14:24:13 +0100702 result = layerSupportObject->IsConcatSupported(inputPtrs, output, cLayer->GetParameters(), reason);
703
704
telsoa014fcda012018-03-09 14:13:49 +0000705 break;
706 }
707 case LayerType::Multiplication:
708 {
709 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
710 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100711 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100712 result = layerSupportObject->IsMultiplicationSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100713 OverrideDataType(input0, dataType),
714 OverrideDataType(input1, dataType),
715 OverrideDataType(output, dataType),
David Beck33f0ae02018-10-18 15:13:56 +0100716 reason);
telsoa014fcda012018-03-09 14:13:49 +0000717 break;
718 }
719 case LayerType::Normalization:
720 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100721 auto cLayer = PolymorphicDowncast<const NormalizationLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000722 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
723 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100724 result = layerSupportObject->IsNormalizationSupported(OverrideDataType(input, dataType),
725 OverrideDataType(output, dataType),
726 cLayer->GetParameters(),
727 reason);
telsoa014fcda012018-03-09 14:13:49 +0000728 break;
729 }
730 case LayerType::Output:
731 {
732 const TensorInfo& output = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100733 result = layerSupportObject->IsOutputSupported(OverrideDataType(output, dataType), reason);
telsoa014fcda012018-03-09 14:13:49 +0000734 break;
735 }
736 case LayerType::Permute:
737 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100738 auto cLayer = PolymorphicDowncast<const PermuteLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000739 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
740 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100741 result = layerSupportObject->IsPermuteSupported(OverrideDataType(input, dataType),
742 OverrideDataType(output, dataType),
743 cLayer->GetParameters(),
744 reason);
telsoa014fcda012018-03-09 14:13:49 +0000745 break;
746 }
Mohamed Nour Abouelseoud5662c202018-09-24 13:30:09 +0100747 case LayerType::Pad:
748 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100749 auto cLayer = PolymorphicDowncast<const PadLayer*>(&layer);
Mohamed Nour Abouelseoud5662c202018-09-24 13:30:09 +0100750 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
751 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100752 result = layerSupportObject->IsPadSupported(
Mohamed Nour Abouelseoud5662c202018-09-24 13:30:09 +0100753 OverrideDataType(input, dataType),
754 OverrideDataType(output, dataType),
755 cLayer->GetParameters(),
David Beck33f0ae02018-10-18 15:13:56 +0100756 reason);
Mohamed Nour Abouelseoud5662c202018-09-24 13:30:09 +0100757 break;
758 }
telsoa014fcda012018-03-09 14:13:49 +0000759 case LayerType::Pooling2d:
760 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100761 auto cLayer = PolymorphicDowncast<const Pooling2dLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000762 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
763 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100764 result = layerSupportObject->IsPooling2dSupported(OverrideDataType(input, dataType),
765 OverrideDataType(output, dataType),
766 cLayer->GetParameters(),
767 reason);
telsoa014fcda012018-03-09 14:13:49 +0000768 break;
769 }
Matteo Martincigh49124022019-01-11 13:25:59 +0000770 case LayerType::PreCompiled:
771 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100772 auto cLayer = PolymorphicDowncast<const PreCompiledLayer*>(&layer);
Matteo Martincigh49124022019-01-11 13:25:59 +0000773 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
774 result = layerSupportObject->IsPreCompiledSupported(OverrideDataType(input, dataType),
775 cLayer->GetParameters(),
776 reason);
777 break;
778 }
Derek Lambertia9cca6a2019-03-25 15:41:58 +0000779 case LayerType::Quantize:
780 {
781 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
782 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
783 result = layerSupportObject->IsQuantizeSupported(input, output, reason);
784 break;
785 }
James Conroy586a9aa2020-03-20 08:49:33 +0000786 case LayerType::QLstm:
787 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100788 auto cLayer = PolymorphicDowncast<const QLstmLayer*>(&layer);
James Conroy586a9aa2020-03-20 08:49:33 +0000789 const QLstmDescriptor& descriptor = cLayer->GetParameters();
790
791 // Inputs
792 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
793 const TensorInfo& previousOutputIn = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
794 const TensorInfo& previousCellStateIn = layer.GetInputSlot(2).GetConnection()->GetTensorInfo();
795
796 // Outputs
797 const TensorInfo& outputStateOut = layer.GetOutputSlot(0).GetTensorInfo();
798 const TensorInfo& cellStateOut = layer.GetOutputSlot(1).GetTensorInfo();
799 const TensorInfo& output = layer.GetOutputSlot(2).GetTensorInfo();
800
801 // Lstm parameters
802 LstmInputParamsInfo paramsInfo;
803
804 // Basic parameters
805 paramsInfo.m_InputToForgetWeights = &cLayer->m_BasicParameters.m_InputToForgetWeights->GetTensorInfo();
806 paramsInfo.m_InputToCellWeights = &cLayer->m_BasicParameters.m_InputToCellWeights->GetTensorInfo();
807 paramsInfo.m_InputToOutputWeights = &cLayer->m_BasicParameters.m_InputToOutputWeights->GetTensorInfo();
808
809 paramsInfo.m_RecurrentToForgetWeights =
810 &cLayer->m_BasicParameters.m_RecurrentToForgetWeights->GetTensorInfo();
811 paramsInfo.m_RecurrentToCellWeights =
812 &cLayer->m_BasicParameters.m_RecurrentToCellWeights->GetTensorInfo();
813 paramsInfo.m_RecurrentToOutputWeights =
814 &cLayer->m_BasicParameters.m_RecurrentToOutputWeights->GetTensorInfo();
815
816 paramsInfo.m_ForgetGateBias = &cLayer->m_BasicParameters.m_ForgetGateBias->GetTensorInfo();
817 paramsInfo.m_CellBias = &cLayer->m_BasicParameters.m_CellBias->GetTensorInfo();
818 paramsInfo.m_OutputGateBias = &cLayer->m_BasicParameters.m_OutputGateBias->GetTensorInfo();
819
820 if(!descriptor.m_CifgEnabled)
821 {
822 paramsInfo.m_InputToInputWeights = &cLayer->m_CifgParameters.m_InputToInputWeights->GetTensorInfo();
823 paramsInfo.m_RecurrentToInputWeights =
824 &cLayer->m_CifgParameters.m_RecurrentToInputWeights->GetTensorInfo();
825 paramsInfo.m_InputGateBias = &cLayer->m_CifgParameters.m_InputGateBias->GetTensorInfo();
826 }
827
828 if(descriptor.m_ProjectionEnabled)
829 {
830 paramsInfo.m_ProjectionWeights = &cLayer->m_ProjectionParameters.m_ProjectionWeights->GetTensorInfo();
James Conroyed324052020-05-18 15:16:42 +0100831
832 // Projection bias is optional even if projection is enabled
833 if (cLayer->m_ProjectionParameters.m_ProjectionBias != nullptr)
834 {
835 paramsInfo.m_ProjectionBias = &cLayer->m_ProjectionParameters.m_ProjectionBias->GetTensorInfo();
836 }
James Conroy586a9aa2020-03-20 08:49:33 +0000837 }
838
839 if(descriptor.m_PeepholeEnabled)
840 {
841 if (!descriptor.m_CifgEnabled)
842 {
843 paramsInfo.m_CellToInputWeights =
844 &cLayer->m_PeepholeParameters.m_CellToInputWeights->GetTensorInfo();
845 }
846
847 paramsInfo.m_CellToForgetWeights =
848 &cLayer->m_PeepholeParameters.m_CellToForgetWeights->GetTensorInfo();
849 paramsInfo.m_CellToOutputWeights = &cLayer->m_PeepholeParameters.m_CellToOutputWeights->GetTensorInfo();
850 }
851
852 if(descriptor.m_LayerNormEnabled)
853 {
854 if (!descriptor.m_CifgEnabled)
855 {
856 paramsInfo.m_InputLayerNormWeights =
857 &cLayer->m_LayerNormParameters.m_InputLayerNormWeights->GetTensorInfo();
858 }
859
860 paramsInfo.m_ForgetLayerNormWeights =
861 &cLayer->m_LayerNormParameters.m_ForgetLayerNormWeights->GetTensorInfo();
862 paramsInfo.m_CellLayerNormWeights =
863 &cLayer->m_LayerNormParameters.m_CellLayerNormWeights->GetTensorInfo();
864 paramsInfo.m_OutputLayerNormWeights =
865 &cLayer->m_LayerNormParameters.m_OutputLayerNormWeights->GetTensorInfo();
866 }
867
868 result = layerSupportObject->IsQLstmSupported(input,
869 previousOutputIn,
870 previousCellStateIn,
871 outputStateOut,
872 cellStateOut,
873 output,
874 descriptor,
875 paramsInfo,
876 reason);
877 break;
878 }
James Conroyee18dc82019-07-17 11:27:46 +0100879 case LayerType::QuantizedLstm:
880 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100881 auto cLayer = PolymorphicDowncast<const QuantizedLstmLayer*>(&layer);
James Conroyee18dc82019-07-17 11:27:46 +0100882
883 // Inputs
Ferran Balaguer737d9ff2019-08-01 09:58:08 +0100884 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
885 const TensorInfo& previousCellStateIn = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
886 const TensorInfo& previousOutputIn = layer.GetInputSlot(2).GetConnection()->GetTensorInfo();
James Conroyee18dc82019-07-17 11:27:46 +0100887
888 // Outputs
Ferran Balaguer737d9ff2019-08-01 09:58:08 +0100889 const TensorInfo& cellStateOut = layer.GetOutputSlot(0).GetTensorInfo();
890 const TensorInfo& output = layer.GetOutputSlot(1).GetTensorInfo();
James Conroyee18dc82019-07-17 11:27:46 +0100891
892 // QuantizedLstm parameters
James Conroyee18dc82019-07-17 11:27:46 +0100893 QuantizedLstmInputParamsInfo paramsInfo;
894
Ferran Balaguer737d9ff2019-08-01 09:58:08 +0100895 paramsInfo.m_InputToInputWeights =
896 &cLayer->m_QuantizedLstmParameters.m_InputToInputWeights->GetTensorInfo();
897 paramsInfo.m_InputToForgetWeights =
898 &cLayer->m_QuantizedLstmParameters.m_InputToForgetWeights->GetTensorInfo();
899 paramsInfo.m_InputToCellWeights =
900 &cLayer->m_QuantizedLstmParameters.m_InputToCellWeights->GetTensorInfo();
901 paramsInfo.m_InputToOutputWeights =
902 &cLayer->m_QuantizedLstmParameters.m_InputToOutputWeights->GetTensorInfo();
James Conroyee18dc82019-07-17 11:27:46 +0100903
Ferran Balaguer737d9ff2019-08-01 09:58:08 +0100904 paramsInfo.m_RecurrentToInputWeights =
905 &cLayer->m_QuantizedLstmParameters.m_RecurrentToInputWeights->GetTensorInfo();
906 paramsInfo.m_RecurrentToForgetWeights =
907 &cLayer->m_QuantizedLstmParameters.m_RecurrentToForgetWeights->GetTensorInfo();
908 paramsInfo.m_RecurrentToCellWeights =
909 &cLayer->m_QuantizedLstmParameters.m_RecurrentToCellWeights->GetTensorInfo();
910 paramsInfo.m_RecurrentToOutputWeights =
911 &cLayer->m_QuantizedLstmParameters.m_RecurrentToOutputWeights->GetTensorInfo();
James Conroyee18dc82019-07-17 11:27:46 +0100912
Ferran Balaguer737d9ff2019-08-01 09:58:08 +0100913 paramsInfo.m_InputGateBias =
914 &cLayer->m_QuantizedLstmParameters.m_InputGateBias->GetTensorInfo();
915 paramsInfo.m_ForgetGateBias =
916 &cLayer->m_QuantizedLstmParameters.m_ForgetGateBias->GetTensorInfo();
917 paramsInfo.m_CellBias =
918 &cLayer->m_QuantizedLstmParameters.m_CellBias->GetTensorInfo();
919 paramsInfo.m_OutputGateBias =
920 &cLayer->m_QuantizedLstmParameters.m_OutputGateBias->GetTensorInfo();;
James Conroyee18dc82019-07-17 11:27:46 +0100921
922 result = layerSupportObject->IsQuantizedLstmSupported(input,
923 previousCellStateIn,
924 previousOutputIn,
925 cellStateOut,
926 output,
927 paramsInfo,
928 reason);
929 break;
930 }
Francis Murtaghe7a86a42018-08-29 12:42:10 +0100931 case LayerType::Division:
932 {
933 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
934 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
935 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100936 result = layerSupportObject->IsDivisionSupported(
Francis Murtaghe7a86a42018-08-29 12:42:10 +0100937 OverrideDataType(input0, dataType),
938 OverrideDataType(input1, dataType),
939 OverrideDataType(output, dataType),
David Beck33f0ae02018-10-18 15:13:56 +0100940 reason);
Francis Murtaghe7a86a42018-08-29 12:42:10 +0100941 break;
942 }
Finn Williams2605b232020-06-10 15:53:46 +0100943 case LayerType::Rank:
944 {
945 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
946 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
947 result = layerSupportObject->IsRankSupported(OverrideDataType(input, dataType),
948 OverrideDataType(output, dataType),
949 reason);
950 break;
951 }
telsoa014fcda012018-03-09 14:13:49 +0000952 case LayerType::Reshape:
953 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100954 auto cLayer = PolymorphicDowncast<const ReshapeLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000955 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
Kevin Maya023c402019-12-12 17:28:05 +0000956 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Matteo Martincigh992d6dc2019-01-10 17:34:20 +0000957 result = layerSupportObject->IsReshapeSupported(OverrideDataType(input, dataType),
Kevin Maya023c402019-12-12 17:28:05 +0000958 OverrideDataType(output, dataType),
Matteo Martincigh992d6dc2019-01-10 17:34:20 +0000959 cLayer->GetParameters(),
960 reason);
telsoa014fcda012018-03-09 14:13:49 +0000961 break;
962 }
Teresa Charlina9075df2019-06-27 15:41:57 +0100963 case LayerType::Resize:
964 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100965 auto cLayer = PolymorphicDowncast<const ResizeLayer*>(&layer);
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +0100966 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
Teresa Charlina9075df2019-06-27 15:41:57 +0100967 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
968 result = layerSupportObject->IsResizeSupported(OverrideDataType(input, dataType),
969 OverrideDataType(output, dataType),
970 cLayer->GetParameters(),
971 reason);
972 break;
973 }
Aron Virginas-Tar636ab402019-09-16 14:27:45 +0100974 case LayerType::Slice:
975 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100976 auto cLayer = PolymorphicDowncast<const SliceLayer*>(&layer);
Aron Virginas-Tar636ab402019-09-16 14:27:45 +0100977
978 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
979 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
980
981 result = layerSupportObject->IsSliceSupported(OverrideDataType(input, dataType),
982 OverrideDataType(output, dataType),
983 cLayer->GetParameters(),
984 reason);
985 break;
986 }
telsoa014fcda012018-03-09 14:13:49 +0000987 case LayerType::Softmax:
988 {
Jan Eilersbb446e52020-04-02 13:56:54 +0100989 auto cLayer = PolymorphicDowncast<const SoftmaxLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000990 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100991 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100992 result = layerSupportObject->IsSoftmaxSupported(OverrideDataType(input, dataType),
993 OverrideDataType(output, dataType),
994 cLayer->GetParameters(),
995 reason);
telsoa014fcda012018-03-09 14:13:49 +0000996 break;
997 }
Nattapat Chaimanowong207ef9a2018-11-02 10:57:25 +0000998 case LayerType::SpaceToBatchNd:
999 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001000 auto cLayer = PolymorphicDowncast<const SpaceToBatchNdLayer*>(&layer);
Nattapat Chaimanowong207ef9a2018-11-02 10:57:25 +00001001 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1002 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
1003 result = layerSupportObject->IsSpaceToBatchNdSupported(OverrideDataType(input, dataType),
1004 OverrideDataType(output, dataType),
1005 cLayer->GetParameters(),
1006 reason);
1007 break;
1008 }
Aron Virginas-Tar972af152019-06-11 14:14:03 +01001009 case LayerType::SpaceToDepth:
1010 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001011 auto cLayer = PolymorphicDowncast<const SpaceToDepthLayer*>(&layer);
Aron Virginas-Tar972af152019-06-11 14:14:03 +01001012
1013 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1014 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
1015
1016 result = layerSupportObject->IsSpaceToDepthSupported(OverrideDataType(input, dataType),
1017 OverrideDataType(output, dataType),
1018 cLayer->GetParameters(),
1019 reason);
1020 break;
1021 }
telsoa014fcda012018-03-09 14:13:49 +00001022 case LayerType::Splitter:
1023 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001024 auto cLayer = PolymorphicDowncast<const SplitterLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +00001025 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
Narumol Prangnawarat15eb5832019-05-20 15:31:05 +01001026
1027 // Get vector of all outputs.
1028 auto getTensorInfo = [&dataType](const OutputSlot& slot)
1029 {
1030 return OverrideDataType(slot.GetTensorInfo(), dataType);
1031 };
Finn Williams3e54d032020-10-22 16:53:35 +01001032 auto beginI = MakeTransformIterator(layer.GetOutputSlots().begin(), getTensorInfo);
1033 auto endI = MakeTransformIterator(layer.GetOutputSlots().end(), getTensorInfo);
Narumol Prangnawarat15eb5832019-05-20 15:31:05 +01001034 std::vector<TensorInfo> outputs(beginI, endI);
1035
1036 const std::vector<std::reference_wrapper<TensorInfo>> outputPtrs(outputs.begin(), outputs.end());
1037
David Beck33f0ae02018-10-18 15:13:56 +01001038 result = layerSupportObject->IsSplitterSupported(OverrideDataType(input, dataType),
Narumol Prangnawarat15eb5832019-05-20 15:31:05 +01001039 outputPtrs,
David Beck33f0ae02018-10-18 15:13:56 +01001040 cLayer->GetParameters(),
1041 reason);
telsoa014fcda012018-03-09 14:13:49 +00001042 break;
1043 }
Matthew Jackson2b8c1da2019-07-04 14:59:16 +01001044 case LayerType::Stack:
1045 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001046 auto cLayer = PolymorphicDowncast<const StackLayer*>(&layer);
Matthew Jackson2b8c1da2019-07-04 14:59:16 +01001047
1048 // Get vector of all inputs.
1049 auto getTensorInfo = [&dataType](const InputSlot& slot)
1050 {
1051 return OverrideDataType(slot.GetConnectedOutputSlot()->GetTensorInfo(), dataType);
1052 };
Finn Williams3e54d032020-10-22 16:53:35 +01001053 auto beginI = MakeTransformIterator(layer.GetInputSlots().begin(), getTensorInfo);
1054 auto endI = MakeTransformIterator(layer.GetInputSlots().end(), getTensorInfo);
Matthew Jackson2b8c1da2019-07-04 14:59:16 +01001055 std::vector<TensorInfo> inputs(beginI, endI);
1056
1057 auto getTensorInfoPtr = [](const TensorInfo& info)
1058 {
1059 return &info;
1060 };
Finn Williams3e54d032020-10-22 16:53:35 +01001061 auto beginPtr = MakeTransformIterator(inputs.begin(), getTensorInfoPtr);
1062 auto endPtr = MakeTransformIterator(inputs.end(), getTensorInfoPtr);
Matthew Jackson2b8c1da2019-07-04 14:59:16 +01001063 std::vector<const TensorInfo*> inputPtrs(beginPtr, endPtr);
1064
1065 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
1066
1067 result = layerSupportObject->IsStackSupported(inputPtrs, output, cLayer->GetParameters(), reason);
1068
1069 break;
1070 }
Derek Lamberti013c3902019-10-21 10:46:16 +01001071 case LayerType::StandIn:
1072 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001073 auto cLayer = PolymorphicDowncast<const StandInLayer*>(&layer);
Derek Lamberti013c3902019-10-21 10:46:16 +01001074
1075 // Get vector of all inputs.
1076 auto getTensorInfoIn = [&dataType](const InputSlot& slot)
1077 {
1078 return OverrideDataType(slot.GetConnectedOutputSlot()->GetTensorInfo(), dataType);
1079 };
1080 auto getTensorInfoOut = [&dataType](const OutputSlot& slot)
1081 {
1082 return OverrideDataType(slot.GetTensorInfo(), dataType);
1083 };
Finn Williams3e54d032020-10-22 16:53:35 +01001084 auto beginI = MakeTransformIterator(layer.GetInputSlots().begin(), getTensorInfoIn);
1085 auto endI = MakeTransformIterator(layer.GetInputSlots().end(), getTensorInfoIn);
Derek Lamberti013c3902019-10-21 10:46:16 +01001086 std::vector<TensorInfo> inputs(beginI, endI);
1087
Finn Williams3e54d032020-10-22 16:53:35 +01001088 auto beginO = MakeTransformIterator(layer.GetOutputSlots().begin(), getTensorInfoOut);
1089 auto endO = MakeTransformIterator(layer.GetOutputSlots().end(), getTensorInfoOut);
Derek Lamberti013c3902019-10-21 10:46:16 +01001090 std::vector<TensorInfo> outputs(beginO, endO);
1091
1092
1093 auto getTensorInfoPtr = [](const TensorInfo& info)
1094 {
1095 return &info;
1096 };
Finn Williams3e54d032020-10-22 16:53:35 +01001097 auto beginPtrI = MakeTransformIterator(inputs.begin(), getTensorInfoPtr);
1098 auto endPtrI = MakeTransformIterator(inputs.end(), getTensorInfoPtr);
Derek Lamberti013c3902019-10-21 10:46:16 +01001099 std::vector<const TensorInfo*> inputPtrs(beginPtrI, endPtrI);
1100
Finn Williams3e54d032020-10-22 16:53:35 +01001101 auto beginPtrO = MakeTransformIterator(outputs.begin(), getTensorInfoPtr);
1102 auto endPtrO = MakeTransformIterator(outputs.end(), getTensorInfoPtr);
Derek Lamberti013c3902019-10-21 10:46:16 +01001103 std::vector<const TensorInfo*> outputPtrs(beginPtrO, endPtrO);
1104
1105
1106 result = layerSupportObject->IsStandInSupported(inputPtrs,
1107 outputPtrs,
1108 cLayer->GetParameters(),
1109 reason);
1110 break;
1111 }
Conor Kennedy430b5d82018-11-14 15:28:28 +00001112 case LayerType::StridedSlice:
1113 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001114 auto cLayer = PolymorphicDowncast<const StridedSliceLayer*>(&layer);
Conor Kennedy430b5d82018-11-14 15:28:28 +00001115 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1116 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
1117 result = layerSupportObject->IsStridedSliceSupported(OverrideDataType(input, dataType),
1118 OverrideDataType(output, dataType),
1119 cLayer->GetParameters(),
1120 reason);
1121 break;
1122 }
David Beckc2044fe2018-09-05 15:00:38 +01001123 case LayerType::Subtraction:
1124 {
1125 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1126 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
1127 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +01001128 result = layerSupportObject->IsSubtractionSupported(
David Beckc2044fe2018-09-05 15:00:38 +01001129 OverrideDataType(input0, dataType),
1130 OverrideDataType(input1, dataType),
1131 OverrideDataType(output, dataType),
David Beck33f0ae02018-10-18 15:13:56 +01001132 reason);
David Beckc2044fe2018-09-05 15:00:38 +01001133 break;
1134 }
Sadik Armaganeff363d2019-04-05 15:25:46 +01001135 case LayerType::Switch:
1136 {
1137 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1138 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
1139 const TensorInfo& output0 = layer.GetOutputSlot(0).GetTensorInfo();
1140 const TensorInfo& output1 = layer.GetOutputSlot(1).GetTensorInfo();
1141 result = layerSupportObject->IsSwitchSupported(OverrideDataType(input0, dataType),
1142 OverrideDataType(input1, dataType),
1143 OverrideDataType(output0, dataType),
1144 OverrideDataType(output1, dataType),
1145 reason);
1146 break;
1147 }
narpra0132b90462018-09-13 11:07:48 +01001148 case LayerType::Mean:
1149 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001150 auto cLayer = PolymorphicDowncast<const MeanLayer*>(&layer);
narpra0132b90462018-09-13 11:07:48 +01001151 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1152 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +01001153 result = layerSupportObject->IsMeanSupported(
narpra0132b90462018-09-13 11:07:48 +01001154 OverrideDataType(input, dataType),
1155 OverrideDataType(output, dataType),
1156 cLayer->GetParameters(),
David Beck33f0ae02018-10-18 15:13:56 +01001157 reason);
narpra0132b90462018-09-13 11:07:48 +01001158 break;
1159 }
kevmay0190539692018-11-29 08:40:19 +00001160 case LayerType::Minimum:
1161 {
1162 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1163 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
1164 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
1165 result = layerSupportObject->IsMinimumSupported(OverrideDataType(input0, dataType),
1166 OverrideDataType(input1, dataType),
1167 OverrideDataType(output, dataType),
1168 reason);
1169 break;
1170 }
Matteo Martincigh0e406ee2019-06-12 15:42:18 +01001171 case LayerType::Prelu:
1172 {
1173 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1174 const TensorInfo& alpha = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
1175 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
1176 result = layerSupportObject->IsPreluSupported(OverrideDataType(input, dataType),
1177 OverrideDataType(alpha, dataType),
1178 OverrideDataType(output, dataType),
1179 reason);
1180 break;
1181 }
Mike Kellyc9ea45a2020-02-28 18:11:58 +00001182 case LayerType::Transpose:
1183 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001184 auto cLayer = PolymorphicDowncast<const TransposeLayer*>(&layer);
Mike Kellyc9ea45a2020-02-28 18:11:58 +00001185 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1186 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
1187 result = layerSupportObject->IsTransposeSupported(OverrideDataType(input, dataType),
1188 OverrideDataType(output, dataType),
1189 cLayer->GetParameters(),
1190 reason);
1191 break;
1192 }
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01001193 case LayerType::TransposeConvolution2d:
1194 {
Jan Eilersbb446e52020-04-02 13:56:54 +01001195 auto cLayer = PolymorphicDowncast<const TransposeConvolution2dLayer*>(&layer);
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01001196
1197 const TensorInfo input = OverrideDataType(layer.GetInputSlot(0).GetConnection()->GetTensorInfo(),
1198 dataType);
1199 const TensorInfo output = OverrideDataType(layer.GetOutputSlot(0).GetTensorInfo(), dataType);
1200
1201 const TransposeConvolution2dDescriptor& descriptor = cLayer->GetParameters();
1202
1203 Optional<TensorInfo> biases;
1204 if (descriptor.m_BiasEnabled)
1205 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001206 ARMNN_ASSERT(cLayer->m_Bias.get() != nullptr);
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01001207 biases = OverrideDataType(cLayer->m_Bias->GetTensorInfo(),
1208 GetBiasTypeFromWeightsType(dataType));
1209 }
1210
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001211 ARMNN_ASSERT(cLayer->m_Weight.get() != nullptr);
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01001212 const TensorInfo weights = OverrideDataType(cLayer->m_Weight->GetTensorInfo(), dataType);
1213
1214 result = layerSupportObject->IsTransposeConvolution2dSupported(input,
1215 output,
1216 descriptor,
1217 weights,
1218 biases,
1219 reason);
1220
1221 break;
1222 }
telsoa014fcda012018-03-09 14:13:49 +00001223 default:
1224 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001225 ARMNN_ASSERT_MSG(false, "WorkloadFactory did not recognise type of layer.");
David Beck33f0ae02018-10-18 15:13:56 +01001226 reason.value() = "Unrecognised layer type";
telsoa014fcda012018-03-09 14:13:49 +00001227 result = false;
1228 break;
1229 }
1230 }
telsoa014fcda012018-03-09 14:13:49 +00001231 return result;
1232}
1233
Sadik Armagan045f6be2020-09-10 13:37:32 +01001234bool IWorkloadFactory::IsLayerSupported(const BackendId& backendId,
1235 const IConnectableLayer& connectableLayer,
1236 Optional<DataType> dataType,
1237 std::string& outReasonIfUnsupported)
1238{
1239 return IsLayerConfigurationSupported(backendId, connectableLayer, dataType, outReasonIfUnsupported);
1240}
1241
David Beckdcb751f2018-10-03 11:42:42 +01001242bool IWorkloadFactory::IsLayerSupported(const IConnectableLayer& connectableLayer,
David Beck29c75de2018-10-23 13:35:58 +01001243 Optional<DataType> dataType,
telsoa01c577f2c2018-08-31 09:22:23 +01001244 std::string& outReasonIfUnsupported)
telsoa014fcda012018-03-09 14:13:49 +00001245{
Jan Eilersbb446e52020-04-02 13:56:54 +01001246 auto layer = PolymorphicDowncast<const Layer*>(&connectableLayer);
Sadik Armagan045f6be2020-09-10 13:37:32 +01001247 return IsLayerConfigurationSupported(layer->GetBackendId(), connectableLayer, dataType, outReasonIfUnsupported);
1248}
1249
1250// TODO merge with defaulted modelOptions above
1251bool IWorkloadFactory::IsLayerSupported(const IConnectableLayer& connectableLayer,
1252 Optional<DataType> dataType,
1253 std::string& outReasonIfUnsupported,
1254 const ModelOptions& modelOptions)
1255{
1256 auto layer = PolymorphicDowncast<const Layer*>(&connectableLayer);
1257 return IsLayerConfigurationSupported(layer->GetBackendId(),
1258 connectableLayer,
1259 dataType,
1260 outReasonIfUnsupported,
1261 modelOptions);
telsoa014fcda012018-03-09 14:13:49 +00001262}
1263
Sadik Armagan04a72972020-09-14 15:44:18 +01001264bool IWorkloadFactory::IsLayerSupported(const BackendId& backendId,
1265 const IConnectableLayer& connectableLayer,
1266 Optional<DataType> dataType,
1267 std::string& outReasonIfUnsupported,
1268 const ModelOptions& modelOptions)
1269{
1270 return IsLayerConfigurationSupported(backendId,
1271 connectableLayer,
1272 dataType,
1273 outReasonIfUnsupported,
1274 modelOptions);
1275}
1276
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001277// Default Implementations
Derek Lamberti901ea112019-12-10 22:07:09 +00001278std::unique_ptr<IWorkload> IWorkloadFactory::CreateAbs(const AbsQueueDescriptor& /*descriptor*/,
1279 const WorkloadInfo& /*info*/) const
Kevin May868eb142019-09-04 17:29:31 +01001280{
1281 return std::unique_ptr<IWorkload>();
1282}
1283
Derek Lamberti901ea112019-12-10 22:07:09 +00001284std::unique_ptr<IWorkload> IWorkloadFactory::CreateActivation(const ActivationQueueDescriptor& /*descriptor*/,
1285 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001286{
1287 return std::unique_ptr<IWorkload>();
1288}
1289
Derek Lamberti901ea112019-12-10 22:07:09 +00001290std::unique_ptr<IWorkload> IWorkloadFactory::CreateAddition(const AdditionQueueDescriptor& /*descriptor*/,
1291 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001292{
1293 return std::unique_ptr<IWorkload>();
1294}
1295
Derek Lamberti901ea112019-12-10 22:07:09 +00001296std::unique_ptr<IWorkload> IWorkloadFactory::CreateArgMinMax(const ArgMinMaxQueueDescriptor& /*descriptor*/,
1297 const WorkloadInfo& /*info*/) const
Nikhil Rajee391d52019-09-05 17:50:44 +01001298{
1299 return std::unique_ptr<IWorkload>();
1300}
1301
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001302std::unique_ptr<IWorkload> IWorkloadFactory::CreateBatchNormalization(
Derek Lamberti901ea112019-12-10 22:07:09 +00001303 const BatchNormalizationQueueDescriptor& /*descriptor*/, const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001304{
1305 return std::unique_ptr<IWorkload>();
1306}
1307
Derek Lamberti901ea112019-12-10 22:07:09 +00001308std::unique_ptr<IWorkload> IWorkloadFactory::CreateBatchToSpaceNd(const BatchToSpaceNdQueueDescriptor& /*desc*/,
1309 const WorkloadInfo& /*Info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001310{
1311 return std::unique_ptr<IWorkload>();
1312}
1313
Derek Lamberti901ea112019-12-10 22:07:09 +00001314std::unique_ptr<IWorkload> IWorkloadFactory::CreateComparison(const ComparisonQueueDescriptor& /*descriptor*/,
1315 const WorkloadInfo& /*info*/) const
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001316{
1317 return std::unique_ptr<IWorkload>();
1318}
1319
Derek Lamberti901ea112019-12-10 22:07:09 +00001320std::unique_ptr<IWorkload> IWorkloadFactory::CreateConcat(const ConcatQueueDescriptor& /*descriptor*/,
1321 const WorkloadInfo& /*info*/) const
Jim Flynn4ed6c832019-05-20 11:02:46 +01001322{
1323 return std::unique_ptr<IWorkload>();
1324}
1325
Derek Lamberti901ea112019-12-10 22:07:09 +00001326std::unique_ptr<IWorkload> IWorkloadFactory::CreateConstant(const ConstantQueueDescriptor& /*descriptor*/,
1327 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001328{
1329 return std::unique_ptr<IWorkload>();
1330}
1331
Narumol Prangnawarat7ddbbae2020-03-13 10:26:05 +00001332std::unique_ptr<IWorkload> IWorkloadFactory::CreateConvertBf16ToFp32(const ConvertBf16ToFp32QueueDescriptor& /*desc*/,
1333 const WorkloadInfo& /*info*/) const
1334{
1335 return std::unique_ptr<IWorkload>();
1336}
1337
Derek Lamberti901ea112019-12-10 22:07:09 +00001338std::unique_ptr<IWorkload> IWorkloadFactory::CreateConvertFp16ToFp32(const ConvertFp16ToFp32QueueDescriptor& /*desc*/,
1339 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001340{
1341 return std::unique_ptr<IWorkload>();
1342}
1343
Narumol Prangnawaratea54a012020-03-16 16:36:10 +00001344std::unique_ptr<IWorkload> IWorkloadFactory::CreateConvertFp32ToBf16(const ConvertFp32ToBf16QueueDescriptor& /*desc*/,
1345 const WorkloadInfo& /*info*/) const
1346{
1347 return std::unique_ptr<IWorkload>();
1348}
1349
Derek Lamberti901ea112019-12-10 22:07:09 +00001350std::unique_ptr<IWorkload> IWorkloadFactory::CreateConvertFp32ToFp16(const ConvertFp32ToFp16QueueDescriptor& /*desc*/,
1351 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001352{
1353 return std::unique_ptr<IWorkload>();
1354}
1355
Derek Lamberti901ea112019-12-10 22:07:09 +00001356std::unique_ptr<IWorkload> IWorkloadFactory::CreateConvolution2d(const Convolution2dQueueDescriptor& /*descriptor*/,
1357 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001358{
1359 return std::unique_ptr<IWorkload>();
1360}
1361
Derek Lamberti901ea112019-12-10 22:07:09 +00001362std::unique_ptr<IWorkload> IWorkloadFactory::CreateDebug(const DebugQueueDescriptor& /*descriptor*/,
1363 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001364{
1365 return std::unique_ptr<IWorkload>();
1366}
1367
Derek Lamberti901ea112019-12-10 22:07:09 +00001368std::unique_ptr<IWorkload> IWorkloadFactory::CreateDepthToSpace(const DepthToSpaceQueueDescriptor& /*descriptor*/,
1369 const WorkloadInfo& /*info*/) const
Aron Virginas-Tardd6247f2019-09-19 14:31:17 +01001370{
1371 return std::unique_ptr<IWorkload>();
1372}
1373
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001374std::unique_ptr<IWorkload> IWorkloadFactory::CreateDepthwiseConvolution2d(
Derek Lamberti901ea112019-12-10 22:07:09 +00001375 const DepthwiseConvolution2dQueueDescriptor& /*descriptor*/, const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001376{
1377 return std::unique_ptr<IWorkload>();
1378}
1379
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00001380std::unique_ptr<IWorkload> IWorkloadFactory::CreateDequantize(
Derek Lamberti901ea112019-12-10 22:07:09 +00001381 const DequantizeQueueDescriptor& /*descriptor*/, const WorkloadInfo& /*info*/) const
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00001382{
1383 return std::unique_ptr<IWorkload>();
1384}
1385
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001386std::unique_ptr<IWorkload> IWorkloadFactory::CreateDetectionPostProcess(
Derek Lamberti901ea112019-12-10 22:07:09 +00001387 const DetectionPostProcessQueueDescriptor& /*descriptor*/, const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001388{
1389 return std::unique_ptr<IWorkload>();
1390}
1391
Derek Lamberti901ea112019-12-10 22:07:09 +00001392std::unique_ptr<IWorkload> IWorkloadFactory::CreateDivision(const DivisionQueueDescriptor& /*descriptor*/,
1393 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001394{
1395 return std::unique_ptr<IWorkload>();
1396}
1397
josh minor4a3c6102020-01-06 16:40:46 -06001398std::unique_ptr<IWorkload> IWorkloadFactory::CreateElementwiseUnary(const ElementwiseUnaryQueueDescriptor& /*desc*/,
1399 const WorkloadInfo& /*info*/) const
1400{
1401 return std::unique_ptr<IWorkload>();
1402}
1403
Derek Lamberti901ea112019-12-10 22:07:09 +00001404std::unique_ptr<IWorkload> IWorkloadFactory::CreateEqual(const EqualQueueDescriptor& /*descriptor*/,
1405 const WorkloadInfo& /*Info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001406{
1407 return std::unique_ptr<IWorkload>();
1408}
1409
Derek Lamberti901ea112019-12-10 22:07:09 +00001410std::unique_ptr<IWorkload> IWorkloadFactory::CreateFakeQuantization(const FakeQuantizationQueueDescriptor& /*desc*/,
1411 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001412{
1413 return std::unique_ptr<IWorkload>();
1414}
1415
Ryan OSheaec6c6802020-06-05 17:17:06 +01001416std::unique_ptr<IWorkload> IWorkloadFactory::CreateFill(const FillQueueDescriptor& /*descriptor*/,
1417 const WorkloadInfo& /*info*/) const
1418{
1419 return std::unique_ptr<IWorkload>();
1420}
1421
Derek Lamberti901ea112019-12-10 22:07:09 +00001422std::unique_ptr<IWorkload> IWorkloadFactory::CreateFloor(const FloorQueueDescriptor& /*descriptor*/,
1423 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001424{
1425 return std::unique_ptr<IWorkload>();
1426}
1427
Derek Lamberti901ea112019-12-10 22:07:09 +00001428std::unique_ptr<IWorkload> IWorkloadFactory::CreateFullyConnected(const FullyConnectedQueueDescriptor& /*descriptor*/,
1429 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001430{
1431 return std::unique_ptr<IWorkload>();
1432}
1433
Derek Lamberti901ea112019-12-10 22:07:09 +00001434std::unique_ptr<IWorkload> IWorkloadFactory::CreateGather(const GatherQueueDescriptor& /*descriptor*/,
1435 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001436{
1437 return std::unique_ptr<IWorkload>();
1438}
1439
Derek Lamberti901ea112019-12-10 22:07:09 +00001440std::unique_ptr<IWorkload> IWorkloadFactory::CreateGreater(const GreaterQueueDescriptor& /*descriptor*/,
1441 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001442{
1443 return std::unique_ptr<IWorkload>();
1444}
1445
Kevin Mayce5045a2019-10-02 14:07:47 +01001446std::unique_ptr<IWorkload> IWorkloadFactory::CreateInstanceNormalization(
Derek Lamberti901ea112019-12-10 22:07:09 +00001447 const InstanceNormalizationQueueDescriptor& /*descriptor*/,
1448 const WorkloadInfo& /*info*/) const
Kevin Mayce5045a2019-10-02 14:07:47 +01001449{
1450 return std::unique_ptr<IWorkload>();
1451}
1452
Derek Lamberti901ea112019-12-10 22:07:09 +00001453std::unique_ptr<IWorkload> IWorkloadFactory::CreateL2Normalization(const L2NormalizationQueueDescriptor& /*desc*/,
1454 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001455{
1456 return std::unique_ptr<IWorkload>();
1457}
1458
James Conroyaba90cd2020-11-06 16:28:18 +00001459std::unique_ptr<IWorkload> IWorkloadFactory::CreateLogicalBinary(const LogicalBinaryQueueDescriptor& /*desc*/,
1460 const WorkloadInfo& /*info*/) const
1461{
1462 return std::unique_ptr<IWorkload>();
1463}
1464
1465std::unique_ptr<IWorkload> IWorkloadFactory::CreateLogicalUnary(const ElementwiseUnaryQueueDescriptor& /*desc*/,
1466 const WorkloadInfo& /*info*/) const
1467{
1468 return std::unique_ptr<IWorkload>();
1469}
1470
Derek Lamberti901ea112019-12-10 22:07:09 +00001471std::unique_ptr<IWorkload> IWorkloadFactory::CreateLogSoftmax(const LogSoftmaxQueueDescriptor& /*descriptor*/,
1472 const WorkloadInfo& /*info*/) const
Aron Virginas-Tarf982dea2019-10-11 14:07:53 +01001473{
1474 return std::unique_ptr<IWorkload>();
1475}
1476
Derek Lamberti901ea112019-12-10 22:07:09 +00001477std::unique_ptr<IWorkload> IWorkloadFactory::CreateLstm(const LstmQueueDescriptor& /*descriptor*/,
1478 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001479{
1480 return std::unique_ptr<IWorkload>();
1481}
1482
Derek Lamberti901ea112019-12-10 22:07:09 +00001483std::unique_ptr<IWorkload> IWorkloadFactory::CreateMaximum(const MaximumQueueDescriptor& /*descriptor*/,
1484 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001485{
1486 return std::unique_ptr<IWorkload>();
1487}
1488
Derek Lamberti901ea112019-12-10 22:07:09 +00001489std::unique_ptr<IWorkload> IWorkloadFactory::CreateMean(const MeanQueueDescriptor& /*descriptor*/,
1490 const WorkloadInfo& /*Info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001491{
1492 return std::unique_ptr<IWorkload>();
1493}
1494
Derek Lamberti901ea112019-12-10 22:07:09 +00001495std::unique_ptr<IWorkload> IWorkloadFactory::CreateMemCopy(const MemCopyQueueDescriptor& /*descriptor*/,
1496 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001497{
1498 return std::unique_ptr<IWorkload>();
1499}
1500
Derek Lamberti901ea112019-12-10 22:07:09 +00001501std::unique_ptr<IWorkload> IWorkloadFactory::CreateMemImport(const MemImportQueueDescriptor& /*descriptor*/,
1502 const WorkloadInfo& /*info*/) const
Derek Lambertif674aa02019-08-01 15:56:25 +01001503{
1504 return std::unique_ptr<IWorkload>();
1505}
1506
Derek Lamberti901ea112019-12-10 22:07:09 +00001507std::unique_ptr<IWorkload> IWorkloadFactory::CreateMerge(const MergeQueueDescriptor& /*descriptor*/,
1508 const WorkloadInfo& /*info*/) const
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01001509{
1510 return std::unique_ptr<IWorkload>();
1511}
1512
Derek Lamberti901ea112019-12-10 22:07:09 +00001513std::unique_ptr<IWorkload> IWorkloadFactory::CreateMerger(const MergerQueueDescriptor& /*descriptor*/,
1514 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001515{
1516 return std::unique_ptr<IWorkload>();
1517}
1518
Derek Lamberti901ea112019-12-10 22:07:09 +00001519std::unique_ptr<IWorkload> IWorkloadFactory::CreateMinimum(const MinimumQueueDescriptor& /*descriptor*/,
1520 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001521{
1522 return std::unique_ptr<IWorkload>();
1523}
1524
Derek Lamberti901ea112019-12-10 22:07:09 +00001525std::unique_ptr<IWorkload> IWorkloadFactory::CreateMultiplication(const MultiplicationQueueDescriptor& /*descriptor*/,
1526 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001527{
1528 return std::unique_ptr<IWorkload>();
1529}
1530
Derek Lamberti901ea112019-12-10 22:07:09 +00001531std::unique_ptr<IWorkload> IWorkloadFactory::CreateNormalization(const NormalizationQueueDescriptor& /*descriptor*/,
1532 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001533{
1534 return std::unique_ptr<IWorkload>();
1535}
1536
Derek Lamberti901ea112019-12-10 22:07:09 +00001537std::unique_ptr<IWorkload> IWorkloadFactory::CreateOutput(const OutputQueueDescriptor& /*descriptor*/,
1538 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001539{
1540 return std::unique_ptr<IWorkload>();
1541}
1542
Derek Lamberti901ea112019-12-10 22:07:09 +00001543std::unique_ptr<IWorkload> IWorkloadFactory::CreatePad(const PadQueueDescriptor& /*descriptor*/,
1544 const WorkloadInfo& /*Info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001545{
1546 return std::unique_ptr<IWorkload>();
1547}
1548
Derek Lamberti901ea112019-12-10 22:07:09 +00001549std::unique_ptr<IWorkload> IWorkloadFactory::CreatePermute(const PermuteQueueDescriptor& /*descriptor*/,
Mike Kellyc9ea45a2020-02-28 18:11:58 +00001550 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001551{
1552 return std::unique_ptr<IWorkload>();
1553}
1554
Derek Lamberti901ea112019-12-10 22:07:09 +00001555std::unique_ptr<IWorkload> IWorkloadFactory::CreatePooling2d(const Pooling2dQueueDescriptor& /*descriptor*/,
1556 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001557{
1558 return std::unique_ptr<IWorkload>();
1559}
1560
Derek Lamberti901ea112019-12-10 22:07:09 +00001561std::unique_ptr<IWorkload> IWorkloadFactory::CreatePreCompiled(const PreCompiledQueueDescriptor& /*descriptor*/,
1562 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001563{
1564 return std::unique_ptr<IWorkload>();
1565}
1566
Derek Lamberti901ea112019-12-10 22:07:09 +00001567std::unique_ptr<IWorkload> IWorkloadFactory::CreatePrelu(const PreluQueueDescriptor &/*descriptor*/,
1568 const WorkloadInfo &/*info*/) const
Matteo Martincigh0e406ee2019-06-12 15:42:18 +01001569{
1570 return std::unique_ptr<IWorkload>();
1571}
1572
Derek Lamberti901ea112019-12-10 22:07:09 +00001573std::unique_ptr<IWorkload> IWorkloadFactory::CreateQuantize(const QuantizeQueueDescriptor& /*descriptor*/,
1574 const WorkloadInfo& /*Info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001575{
1576 return std::unique_ptr<IWorkload>();
1577}
1578
James Conroy586a9aa2020-03-20 08:49:33 +00001579std::unique_ptr<IWorkload> IWorkloadFactory::CreateQLstm(const QLstmQueueDescriptor& /*descriptor*/,
1580 const WorkloadInfo& /*info*/) const
1581{
1582 return std::unique_ptr<IWorkload>();
1583}
1584
Derek Lamberti901ea112019-12-10 22:07:09 +00001585std::unique_ptr<IWorkload> IWorkloadFactory::CreateQuantizedLstm(const QuantizedLstmQueueDescriptor& /*descriptor*/,
1586 const WorkloadInfo& /*info*/) const
James Conroyee18dc82019-07-17 11:27:46 +01001587{
1588 return std::unique_ptr<IWorkload>();
1589}
Finn Williams2605b232020-06-10 15:53:46 +01001590std::unique_ptr<IWorkload> IWorkloadFactory::CreateRank(const RankQueueDescriptor& /*descriptor*/,
1591 const WorkloadInfo& /*info*/) const
1592{
1593 return std::unique_ptr<IWorkload>();
1594}
James Conroyee18dc82019-07-17 11:27:46 +01001595
Derek Lamberti901ea112019-12-10 22:07:09 +00001596std::unique_ptr<IWorkload> IWorkloadFactory::CreateReshape(const ReshapeQueueDescriptor& /*descriptor*/,
1597 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001598{
1599 return std::unique_ptr<IWorkload>();
1600}
1601
Derek Lamberti901ea112019-12-10 22:07:09 +00001602std::unique_ptr<IWorkload> IWorkloadFactory::CreateResizeBilinear(const ResizeBilinearQueueDescriptor& /*descriptor*/,
1603 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001604{
1605 return std::unique_ptr<IWorkload>();
1606}
1607
Derek Lamberti901ea112019-12-10 22:07:09 +00001608std::unique_ptr<IWorkload> IWorkloadFactory::CreateResize(const ResizeQueueDescriptor& /*descriptor*/,
1609 const WorkloadInfo& /*info*/) const
Teresa Charlina9075df2019-06-27 15:41:57 +01001610{
1611 return std::unique_ptr<IWorkload>();
1612}
1613
Derek Lamberti901ea112019-12-10 22:07:09 +00001614std::unique_ptr<IWorkload> IWorkloadFactory::CreateRsqrt(const RsqrtQueueDescriptor& /*descriptor*/,
1615 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001616{
1617 return std::unique_ptr<IWorkload>();
1618}
1619
Derek Lamberti901ea112019-12-10 22:07:09 +00001620std::unique_ptr<IWorkload> IWorkloadFactory::CreateSlice(const SliceQueueDescriptor& /*descriptor*/,
1621 const WorkloadInfo& /*info*/) const
1622{
1623 return std::unique_ptr<IWorkload>();
1624}
Mike Kellyc9ea45a2020-02-28 18:11:58 +00001625
Derek Lamberti901ea112019-12-10 22:07:09 +00001626std::unique_ptr<IWorkload> IWorkloadFactory::CreateSoftmax(const SoftmaxQueueDescriptor& /*descriptor*/,
1627 const WorkloadInfo& /*info*/) const
Aron Virginas-Tar636ab402019-09-16 14:27:45 +01001628{
1629 return std::unique_ptr<IWorkload>();
1630}
1631
Derek Lamberti901ea112019-12-10 22:07:09 +00001632std::unique_ptr<IWorkload> IWorkloadFactory::CreateSplitter(const SplitterQueueDescriptor& /*descriptor*/,
1633 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001634{
1635 return std::unique_ptr<IWorkload>();
1636}
1637
Derek Lamberti901ea112019-12-10 22:07:09 +00001638std::unique_ptr<IWorkload> IWorkloadFactory::CreateSpaceToBatchNd(const SpaceToBatchNdQueueDescriptor& /*descriptor*/,
1639 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001640{
1641 return std::unique_ptr<IWorkload>();
1642}
1643
Derek Lamberti901ea112019-12-10 22:07:09 +00001644std::unique_ptr<IWorkload> IWorkloadFactory::CreateSpaceToDepth(const SpaceToDepthQueueDescriptor& /*descriptor*/,
1645 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001646{
1647 return std::unique_ptr<IWorkload>();
1648}
1649
Derek Lamberti901ea112019-12-10 22:07:09 +00001650std::unique_ptr<IWorkload> IWorkloadFactory::CreateStack(const StackQueueDescriptor& /*descriptor*/,
1651 const WorkloadInfo& /*info*/) const
Aron Virginas-Tar972af152019-06-11 14:14:03 +01001652{
1653 return std::unique_ptr<IWorkload>();
1654}
1655
Derek Lamberti901ea112019-12-10 22:07:09 +00001656std::unique_ptr<IWorkload> IWorkloadFactory::CreateStridedSlice(const StridedSliceQueueDescriptor& /*descriptor*/,
1657 const WorkloadInfo& /*info*/) const
Matthew Jackson2b8c1da2019-07-04 14:59:16 +01001658{
1659 return std::unique_ptr<IWorkload>();
1660}
1661
Derek Lamberti901ea112019-12-10 22:07:09 +00001662std::unique_ptr<IWorkload> IWorkloadFactory::CreateSubtraction(const SubtractionQueueDescriptor& /*descriptor*/,
1663 const WorkloadInfo& /*info*/) const
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001664{
1665 return std::unique_ptr<IWorkload>();
1666}
1667
Derek Lamberti901ea112019-12-10 22:07:09 +00001668std::unique_ptr<IWorkload> IWorkloadFactory::CreateSwitch(const SwitchQueueDescriptor& /*descriptor*/,
1669 const WorkloadInfo& /*info*/) const
Sadik Armaganeff363d2019-04-05 15:25:46 +01001670{
1671 return std::unique_ptr<IWorkload>();
1672}
1673
Mike Kellyc9ea45a2020-02-28 18:11:58 +00001674std::unique_ptr<IWorkload> IWorkloadFactory::CreateTranspose(const TransposeQueueDescriptor& /*descriptor*/,
1675 const WorkloadInfo& /*info*/) const
1676{
1677 return std::unique_ptr<IWorkload>();
1678}
1679
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01001680std::unique_ptr<IWorkload> IWorkloadFactory::CreateTransposeConvolution2d(
Derek Lamberti901ea112019-12-10 22:07:09 +00001681 const TransposeConvolution2dQueueDescriptor& /*descriptor*/,
1682 const WorkloadInfo& /*info*/) const
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01001683{
1684 return std::unique_ptr<IWorkload>();
surmeh013537c2c2018-05-18 16:31:43 +01001685}
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01001686
} // namespace armnn