blob: 30dfa023f9120a03bee017ee2d9bb91527a6fe60 [file] [log] [blame]
telsoa014fcda012018-03-09 14:13:49 +00001//
2// Copyright © 2017 Arm Ltd. All rights reserved.
David Beckecb56cd2018-09-05 12:52:57 +01003// SPDX-License-Identifier: MIT
telsoa014fcda012018-03-09 14:13:49 +00004//
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +00005
6#include "CpuTensorHandle.hpp"
Derek Lambertia9cca6a2019-03-25 15:41:58 +00007#include "WorkloadFactory.hpp"
8
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +00009
10#include <Layer.hpp>
11#include <LayersFwd.hpp>
David Beckdcb751f2018-10-03 11:42:42 +010012
David Beckb4540be2018-09-24 13:18:27 +010013#include <armnn/Types.hpp>
14#include <armnn/LayerSupport.hpp>
David Beck111b5d92018-11-12 14:59:37 +000015#include <armnn/ILayerSupport.hpp>
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +000016
David Beck111b5d92018-11-12 14:59:37 +000017#include <backendsCommon/BackendRegistry.hpp>
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +000018#include <backendsCommon/WorkloadFactory.hpp>
David Beck111b5d92018-11-12 14:59:37 +000019#include <backendsCommon/IBackendInternal.hpp>
Francis Murtagh46c09d02019-05-28 08:15:28 +010020#include <backendsCommon/test/WorkloadTestUtils.hpp>
telsoa014fcda012018-03-09 14:13:49 +000021
22#include <boost/cast.hpp>
telsoa014fcda012018-03-09 14:13:49 +000023#include <boost/iterator/transform_iterator.hpp>
24
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +000025#include <cstring>
David Beck111b5d92018-11-12 14:59:37 +000026#include <sstream>
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +000027
telsoa014fcda012018-03-09 14:13:49 +000028namespace armnn
29{
30
telsoa01c577f2c2018-08-31 09:22:23 +010031namespace
32{
telsoa01c577f2c2018-08-31 09:22:23 +010033
David Beck29c75de2018-10-23 13:35:58 +010034const TensorInfo OverrideDataType(const TensorInfo& info, Optional<DataType> type)
35{
36 if (!type)
37 {
38 return info;
telsoa01c577f2c2018-08-31 09:22:23 +010039 }
40
David Beck29c75de2018-10-23 13:35:58 +010041 return TensorInfo(info.GetShape(), type.value(), info.GetQuantizationScale(), info.GetQuantizationOffset());
telsoa01c577f2c2018-08-31 09:22:23 +010042}
43
David Beck29c75de2018-10-23 13:35:58 +010044} // anonymous namespace
45
David Beck33f0ae02018-10-18 15:13:56 +010046bool IWorkloadFactory::IsLayerSupported(const BackendId& backendId,
David Beckdcb751f2018-10-03 11:42:42 +010047 const IConnectableLayer& connectableLayer,
David Beck29c75de2018-10-23 13:35:58 +010048 Optional<DataType> dataType,
David Beckdcb751f2018-10-03 11:42:42 +010049 std::string& outReasonIfUnsupported)
telsoa014fcda012018-03-09 14:13:49 +000050{
David Beck33f0ae02018-10-18 15:13:56 +010051 Optional<std::string&> reason = outReasonIfUnsupported;
telsoa014fcda012018-03-09 14:13:49 +000052 bool result;
David Beckdcb751f2018-10-03 11:42:42 +010053 const Layer& layer = *(boost::polymorphic_downcast<const Layer*>(&connectableLayer));
54
David Beck111b5d92018-11-12 14:59:37 +000055 auto const& backendRegistry = BackendRegistryInstance();
56 if (!backendRegistry.IsBackendRegistered(backendId))
57 {
58 std::stringstream ss;
59 ss << connectableLayer.GetName() << " is not supported on " << backendId
60 << " because this backend is not registered.";
61
62 outReasonIfUnsupported = ss.str();
63 return false;
64 }
65
66 auto backendFactory = backendRegistry.GetFactory(backendId);
67 auto backendObject = backendFactory();
68 auto layerSupportObject = backendObject->GetLayerSupport();
David Beck33f0ae02018-10-18 15:13:56 +010069
telsoa014fcda012018-03-09 14:13:49 +000070 switch(layer.GetType())
71 {
Kevin May868eb142019-09-04 17:29:31 +010072 case LayerType::Abs:
73 {
74 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
75 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
76 result = layerSupportObject->IsAbsSupported(OverrideDataType(input, dataType),
77 OverrideDataType(output, dataType),
78 reason);
79 break;
80 }
telsoa014fcda012018-03-09 14:13:49 +000081 case LayerType::Activation:
82 {
83 auto cLayer = boost::polymorphic_downcast<const ActivationLayer*>(&layer);
84 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +010085 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +010086 result = layerSupportObject->IsActivationSupported(
telsoa01c577f2c2018-08-31 09:22:23 +010087 OverrideDataType(input, dataType),
88 OverrideDataType(output, dataType),
89 cLayer->GetParameters(),
David Beck33f0ae02018-10-18 15:13:56 +010090 reason);
telsoa014fcda012018-03-09 14:13:49 +000091 break;
92 }
93 case LayerType::Addition:
94 {
95 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
96 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
97 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +010098 result = layerSupportObject->IsAdditionSupported(
telsoa01c577f2c2018-08-31 09:22:23 +010099 OverrideDataType(input0, dataType),
100 OverrideDataType(input1, dataType),
101 OverrideDataType(output, dataType),
David Beck33f0ae02018-10-18 15:13:56 +0100102 reason);
telsoa014fcda012018-03-09 14:13:49 +0000103 break;
104 }
Nikhil Rajee391d52019-09-05 17:50:44 +0100105 case LayerType::ArgMinMax:
106 {
107 auto cLayer = boost::polymorphic_downcast<const ArgMinMaxLayer*>(&layer);
108 const ArgMinMaxDescriptor& descriptor = cLayer->GetParameters();
109
110 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
111 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
112 result = layerSupportObject->IsArgMinMaxSupported(
113 OverrideDataType(input, dataType),
114 OverrideDataType(output, dataType),
115 descriptor,
116 reason);
117 break;
118 }
telsoa014fcda012018-03-09 14:13:49 +0000119 case LayerType::BatchNormalization:
120 {
121 auto cLayer = boost::polymorphic_downcast<const BatchNormalizationLayer*>(&layer);
122 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100123 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
124 const TensorInfo& mean = cLayer->m_Mean->GetTensorInfo();
125 const TensorInfo& var = cLayer->m_Variance->GetTensorInfo();
126 const TensorInfo& beta = cLayer->m_Beta->GetTensorInfo();
127 const TensorInfo& gamma = cLayer->m_Gamma->GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100128 result = layerSupportObject->IsBatchNormalizationSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100129 OverrideDataType(input, dataType),
130 OverrideDataType(output, dataType),
131 OverrideDataType(mean, dataType),
132 OverrideDataType(var, dataType),
133 OverrideDataType(beta, dataType),
134 OverrideDataType(gamma, dataType),
135 cLayer->GetParameters(),
David Beck33f0ae02018-10-18 15:13:56 +0100136 reason);
telsoa014fcda012018-03-09 14:13:49 +0000137 break;
138 }
Éanna Ó Catháin4e1e1362018-11-12 11:36:34 +0000139 case LayerType::BatchToSpaceNd:
140 {
141 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
142 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
143 auto cLayer = boost::polymorphic_downcast<const BatchToSpaceNdLayer*>(&layer);
144
145 result = layerSupportObject->IsBatchToSpaceNdSupported(OverrideDataType(input, dataType),
146 OverrideDataType(output, dataType),
147 cLayer->GetParameters(),
148 reason);
149 break;
150 }
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +0100151 case LayerType::Comparison:
152 {
153 auto cLayer = boost::polymorphic_downcast<const ComparisonLayer*>(&layer);
154
155 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
156 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
157 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
158
159 result = layerSupportObject->IsComparisonSupported(OverrideDataType(input0, dataType),
160 OverrideDataType(input1, dataType),
161 OverrideDataType(output, DataType::Boolean),
162 cLayer->GetParameters(),
163 reason);
164 break;
165 }
telsoa014fcda012018-03-09 14:13:49 +0000166 case LayerType::Constant:
167 {
168 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100169 result = layerSupportObject->IsConstantSupported(OverrideDataType(output, dataType), reason);
telsoa01c577f2c2018-08-31 09:22:23 +0100170 break;
171 }
172 case LayerType::ConvertFp16ToFp32:
173 {
174 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
175 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100176 result = layerSupportObject->IsConvertFp16ToFp32Supported(input, output, reason);
telsoa01c577f2c2018-08-31 09:22:23 +0100177 break;
178 }
179 case LayerType::ConvertFp32ToFp16:
180 {
181 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
182 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100183 result = layerSupportObject->IsConvertFp32ToFp16Supported(input, output, reason);
telsoa014fcda012018-03-09 14:13:49 +0000184 break;
185 }
186 case LayerType::Convolution2d:
187 {
188 auto cLayer = boost::polymorphic_downcast<const Convolution2dLayer*>(&layer);
arovir01a6824102018-08-28 17:40:45 +0100189
190 const TensorInfo input = OverrideDataType(layer.GetInputSlot(0).GetConnection()->GetTensorInfo(),
191 dataType);
telsoa01c577f2c2018-08-31 09:22:23 +0100192 const TensorInfo output = OverrideDataType(layer.GetOutputSlot(0).GetTensorInfo(), dataType);
surmeh013537c2c2018-05-18 16:31:43 +0100193 BOOST_ASSERT(cLayer->m_Weight.get() != nullptr);
194
arovir01a6824102018-08-28 17:40:45 +0100195 const Convolution2dDescriptor& descriptor = cLayer->GetParameters();
surmeh013537c2c2018-05-18 16:31:43 +0100196
arovir01a6824102018-08-28 17:40:45 +0100197 // Construct optional biases object based on the value of m_BiasEnabled
David Beck5eec11d2018-10-04 15:43:17 +0100198 Optional<TensorInfo> biases;
surmeh013537c2c2018-05-18 16:31:43 +0100199 if (descriptor.m_BiasEnabled)
200 {
David Beck5eec11d2018-10-04 15:43:17 +0100201 biases =
202 OverrideDataType(cLayer->m_Bias->GetTensorInfo(), GetBiasTypeFromWeightsType(dataType));
surmeh013537c2c2018-05-18 16:31:43 +0100203 }
204
David Beck33f0ae02018-10-18 15:13:56 +0100205 result = layerSupportObject->IsConvolution2dSupported(
surmeh013537c2c2018-05-18 16:31:43 +0100206 input,
207 output,
208 descriptor,
telsoa01c577f2c2018-08-31 09:22:23 +0100209 OverrideDataType(cLayer->m_Weight->GetTensorInfo(), dataType),
arovir01a6824102018-08-28 17:40:45 +0100210 biases,
David Beck33f0ae02018-10-18 15:13:56 +0100211 reason);
telsoa014fcda012018-03-09 14:13:49 +0000212 break;
213 }
Nattapat Chaimanowonga9a1cf12018-12-03 16:06:49 +0000214 case LayerType::Debug:
215 {
216 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
217 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
218
219 result = layerSupportObject->IsDebugSupported(OverrideDataType(input, dataType),
220 OverrideDataType(output, dataType),
221 reason);
222 break;
223 }
Aron Virginas-Tardd6247f2019-09-19 14:31:17 +0100224 case LayerType::DepthToSpace:
225 {
226 auto cLayer = boost::polymorphic_downcast<const DepthToSpaceLayer*>(&layer);
227
228 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
229 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
230
231 result = layerSupportObject->IsDepthToSpaceSupported(OverrideDataType(input, dataType),
232 OverrideDataType(output, dataType),
233 cLayer->GetParameters(),
234 reason);
235 break;
236 }
telsoa014fcda012018-03-09 14:13:49 +0000237 case LayerType::DepthwiseConvolution2d:
238 {
239 auto cLayer = boost::polymorphic_downcast<const DepthwiseConvolution2dLayer*>(&layer);
telsoa01c577f2c2018-08-31 09:22:23 +0100240 const TensorInfo& input = OverrideDataType(layer.GetInputSlot(0).GetConnection()->GetTensorInfo(),
241 dataType);
242 const TensorInfo& output = OverrideDataType(layer.GetOutputSlot(0).GetTensorInfo(), dataType);
243 BOOST_ASSERT(cLayer->m_Weight.get() != nullptr);
244
telsoa01c577f2c2018-08-31 09:22:23 +0100245 const DepthwiseConvolution2dDescriptor& descriptor = cLayer->GetParameters();
arovir01a6824102018-08-28 17:40:45 +0100246
247 // Construct optional biases object based on the value of m_BiasEnabled
David Beck5eec11d2018-10-04 15:43:17 +0100248 Optional<TensorInfo> biases;
telsoa01c577f2c2018-08-31 09:22:23 +0100249 if (descriptor.m_BiasEnabled)
250 {
David Beck5eec11d2018-10-04 15:43:17 +0100251 biases =
252 OverrideDataType(cLayer->m_Bias->GetTensorInfo(), GetBiasTypeFromWeightsType(dataType));
telsoa01c577f2c2018-08-31 09:22:23 +0100253 }
telsoa01c577f2c2018-08-31 09:22:23 +0100254
David Beck33f0ae02018-10-18 15:13:56 +0100255 result = layerSupportObject->IsDepthwiseConvolutionSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100256 input,
257 output,
258 descriptor,
259 OverrideDataType(cLayer->m_Weight->GetTensorInfo(), dataType),
arovir01a6824102018-08-28 17:40:45 +0100260 biases,
David Beck33f0ae02018-10-18 15:13:56 +0100261 reason);
telsoa014fcda012018-03-09 14:13:49 +0000262 break;
263 }
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +0000264 case LayerType::Dequantize:
265 {
266 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
267 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
268
269 result = layerSupportObject->IsDequantizeSupported(OverrideDataType(input, dataType),
270 OverrideDataType(output, DataType::Float32),
271 reason);
272 break;
273 }
Narumol Prangnawarat94dd5d82019-01-23 18:06:26 +0000274 case LayerType::DetectionPostProcess:
275 {
276 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
277 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
278 auto cLayer = boost::polymorphic_downcast<const DetectionPostProcessLayer*>(&layer);
279 const DetectionPostProcessDescriptor& descriptor = cLayer->GetParameters();
280 result = layerSupportObject->IsDetectionPostProcessSupported(input0,
281 input1,
282 descriptor,
283 reason);
284 break;
285 }
telsoa014fcda012018-03-09 14:13:49 +0000286 case LayerType::FakeQuantization:
287 {
288 auto cLayer = boost::polymorphic_downcast<const FakeQuantizationLayer*>(&layer);
289 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100290 result = layerSupportObject->IsFakeQuantizationSupported(OverrideDataType(input, dataType),
291 cLayer->GetParameters(),
292 reason);
telsoa014fcda012018-03-09 14:13:49 +0000293 break;
294 }
295 case LayerType::Floor:
296 {
297 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
298 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100299 result = layerSupportObject->IsFloorSupported(OverrideDataType(input, dataType),
300 OverrideDataType(output, dataType),
301 reason);
telsoa014fcda012018-03-09 14:13:49 +0000302 break;
303 }
304 case LayerType::FullyConnected:
305 {
306 auto cLayer = boost::polymorphic_downcast<const FullyConnectedLayer*>(&layer);
307 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100308 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
309 BOOST_ASSERT(cLayer->m_Weight.get() != nullptr);
310
311 TensorInfo biasInfo;
312 const TensorInfo * biasInfoPtr = nullptr;
313 static const TensorInfo dummyFloat16Bias(TensorShape({1,1,1,1}), DataType::Float16);
314 static const TensorInfo dummyFloat32Bias(TensorShape({1,1,1,1}), DataType::Float32);
315 static const TensorInfo dummyQA8Bias(TensorShape({1,1,1,1}), DataType::Signed32);
316
317 const FullyConnectedDescriptor& descriptor = cLayer->GetParameters();
318 if (descriptor.m_BiasEnabled)
319 {
320 BOOST_ASSERT(cLayer->m_Bias.get() != nullptr);
321 biasInfo = OverrideDataType(cLayer->m_Bias->GetTensorInfo(), GetBiasTypeFromWeightsType(dataType));
322 biasInfoPtr = &biasInfo;
323 }
324 else
325 {
326 // If biases are not enabled pass a dummy tensorinfo for the validation
327 switch(input.GetDataType())
328 {
329 case DataType::Float16:
330 {
331 biasInfoPtr = &dummyFloat16Bias;
332 break;
333 }
334 case DataType::Float32:
335 {
336 biasInfoPtr = &dummyFloat32Bias;
337 break;
338 }
339 case DataType::QuantisedAsymm8:
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +0100340 case DataType::QuantisedSymm16:
telsoa01c577f2c2018-08-31 09:22:23 +0100341 {
342 biasInfoPtr = &dummyQA8Bias;
343 break;
344 }
345 default:
346 {
347 BOOST_ASSERT_MSG(false, "Unexpected bias type");
348 }
349 }
350 }
351
David Beck33f0ae02018-10-18 15:13:56 +0100352 result = layerSupportObject->IsFullyConnectedSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100353 OverrideDataType(input, dataType),
354 OverrideDataType(output, dataType),
355 OverrideDataType(cLayer->m_Weight->GetTensorInfo(), dataType),
356 *biasInfoPtr,
357 descriptor,
David Beck33f0ae02018-10-18 15:13:56 +0100358 reason);
telsoa014fcda012018-03-09 14:13:49 +0000359 break;
360 }
narpra01b89b05f2019-01-16 09:53:09 +0000361 case LayerType::Gather:
362 {
363 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
364 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
365 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
366 result = layerSupportObject->IsGatherSupported(OverrideDataType(input0, dataType),
Ellen Norris-Thompsone0dbedf2019-06-24 09:23:38 +0100367 input1,
narpra01b89b05f2019-01-16 09:53:09 +0000368 OverrideDataType(output, dataType),
369 reason);
370 break;
371 }
telsoa014fcda012018-03-09 14:13:49 +0000372 case LayerType::Input:
373 {
374 const TensorInfo& input = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100375 result = layerSupportObject->IsInputSupported(OverrideDataType(input, dataType), reason);
telsoa014fcda012018-03-09 14:13:49 +0000376 break;
377 }
Kevin Mayce5045a2019-10-02 14:07:47 +0100378 case LayerType::InstanceNormalization:
379 {
380 auto cLayer = boost::polymorphic_downcast<const InstanceNormalizationLayer*>(&layer);
381 const InstanceNormalizationDescriptor& descriptor = cLayer->GetParameters();
382
383 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
384 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
385
386 result = layerSupportObject->IsInstanceNormalizationSupported(
387 OverrideDataType(input, dataType),
388 OverrideDataType(output, dataType),
389 descriptor,
390 reason);
391 break;
392 }
telsoa014fcda012018-03-09 14:13:49 +0000393 case LayerType::L2Normalization:
394 {
Matteo Martincighbcd3c852018-09-28 14:14:12 +0100395 auto cLayer = boost::polymorphic_downcast<const L2NormalizationLayer*>(&layer);
396 const L2NormalizationDescriptor& descriptor = cLayer->GetParameters();
397
telsoa014fcda012018-03-09 14:13:49 +0000398 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100399 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Matteo Martincighbcd3c852018-09-28 14:14:12 +0100400
David Beck33f0ae02018-10-18 15:13:56 +0100401 result = layerSupportObject->IsL2NormalizationSupported(
Matteo Martincighbcd3c852018-09-28 14:14:12 +0100402 OverrideDataType(input, dataType),
403 OverrideDataType(output, dataType),
404 descriptor,
David Beck33f0ae02018-10-18 15:13:56 +0100405 reason);
telsoa01c577f2c2018-08-31 09:22:23 +0100406 break;
407 }
Aron Virginas-Tarf982dea2019-10-11 14:07:53 +0100408 case LayerType::LogSoftmax:
409 {
410 auto cLayer = boost::polymorphic_downcast<const LogSoftmaxLayer*>(&layer);
411
412 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
413 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
414
415 result = layerSupportObject->IsLogSoftmaxSupported(OverrideDataType(input, dataType),
416 OverrideDataType(output, dataType),
417 cLayer->GetParameters(),
418 reason);
419 break;
420 }
telsoa01c577f2c2018-08-31 09:22:23 +0100421 case LayerType::Lstm:
422 {
423 auto cLayer = boost::polymorphic_downcast<const LstmLayer*>(&layer);
424 const LstmDescriptor& descriptor = cLayer->GetParameters();
425
426 // All inputs.
427 const TensorInfo& input = OverrideDataType(layer.GetInputSlot(0).GetConnection()->GetTensorInfo(),
428 dataType);
429 const TensorInfo& outputStateIn = OverrideDataType(layer.GetInputSlot(1).GetConnection()->GetTensorInfo(),
430 dataType);
431 const TensorInfo& cellStateIn = OverrideDataType(layer.GetInputSlot(2).GetConnection()->GetTensorInfo(),
432 dataType);
433 // All outputs
434 const TensorInfo& scratchBuffer = OverrideDataType(layer.GetOutputSlot(0).GetTensorInfo(), dataType);
435 const TensorInfo& outputStateOut = OverrideDataType(layer.GetOutputSlot(1).GetTensorInfo(), dataType);
436 const TensorInfo& cellStateOut = OverrideDataType(layer.GetOutputSlot(2).GetTensorInfo(), dataType);
437 const TensorInfo& output = OverrideDataType(layer.GetOutputSlot(3).GetTensorInfo(), dataType);
438
439 // Basic parameters
440 const TensorInfo& inputToForgetWeights
441 = OverrideDataType(cLayer->m_BasicParameters.m_InputToForgetWeights->GetTensorInfo(), dataType);
442 const TensorInfo& inputToCellWeights
443 = OverrideDataType(cLayer->m_BasicParameters.m_InputToCellWeights->GetTensorInfo(), dataType);
444 const TensorInfo& inputToOutputWeights
445 = OverrideDataType(cLayer->m_BasicParameters.m_InputToOutputWeights->GetTensorInfo(), dataType);
446 const TensorInfo& recurrentToForgetWeights
447 = OverrideDataType(cLayer->m_BasicParameters.m_RecurrentToForgetWeights->GetTensorInfo(), dataType);
448 const TensorInfo& recurrentToCellWeights
449 = OverrideDataType(cLayer->m_BasicParameters.m_RecurrentToCellWeights->GetTensorInfo(), dataType);
450 const TensorInfo& recurrentToOutputWeights
451 = OverrideDataType(cLayer->m_BasicParameters.m_RecurrentToOutputWeights->GetTensorInfo(), dataType);
452 const TensorInfo& forgetGateBias
453 = OverrideDataType(cLayer->m_BasicParameters.m_ForgetGateBias->GetTensorInfo(), dataType);
454 const TensorInfo& cellBias
455 = OverrideDataType(cLayer->m_BasicParameters.m_CellBias->GetTensorInfo(), dataType);
456 const TensorInfo& outputGateBias
457 = OverrideDataType(cLayer->m_BasicParameters.m_OutputGateBias->GetTensorInfo(), dataType);
458
Jan Eilersd01a83c2019-07-03 18:20:40 +0100459 LstmInputParamsInfo paramsInfo;
telsoa01c577f2c2018-08-31 09:22:23 +0100460
Jan Eilersd01a83c2019-07-03 18:20:40 +0100461 paramsInfo.m_InputToForgetWeights = &inputToForgetWeights;
462 paramsInfo.m_InputToCellWeights = &inputToCellWeights;
463 paramsInfo.m_InputToOutputWeights = &inputToOutputWeights;
464 paramsInfo.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
465 paramsInfo.m_RecurrentToCellWeights = &recurrentToCellWeights;
466 paramsInfo.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
467 paramsInfo.m_ForgetGateBias = &forgetGateBias;
468 paramsInfo.m_CellBias = &cellBias;
469 paramsInfo.m_OutputGateBias = &outputGateBias;
470
471
472 // Optional parameters
telsoa01c577f2c2018-08-31 09:22:23 +0100473 TensorInfo optInputToInputWeights;
474 TensorInfo optRecurrentToInputWeights;
475 TensorInfo optCellToInputWeights;
476 TensorInfo optInputGateBias;
477 TensorInfo optProjectionWeights;
478 TensorInfo optProjectionBias;
479 TensorInfo optCellToForgetWeights;
480 TensorInfo optCellToOutputWeights;
Jan Eilers38e05bd2019-06-26 13:10:09 +0100481 TensorInfo optInputLayerNormWeights;
482 TensorInfo optForgetLayerNormWeights;
483 TensorInfo optCellLayerNormWeights;
484 TensorInfo optOutputLayerNormWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100485
486 if(!descriptor.m_CifgEnabled)
487 {
488 optInputToInputWeights =
489 OverrideDataType(cLayer->m_CifgParameters.m_InputToInputWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100490 paramsInfo.m_InputToInputWeights = &optInputToInputWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100491
492 optRecurrentToInputWeights =
493 OverrideDataType(cLayer->m_CifgParameters.m_RecurrentToInputWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100494 paramsInfo.m_RecurrentToInputWeights = &optRecurrentToInputWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100495 if (cLayer->m_CifgParameters.m_CellToInputWeights != nullptr)
496 {
497 optCellToInputWeights =
498 OverrideDataType(cLayer->m_CifgParameters.m_CellToInputWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100499 paramsInfo.m_CellToInputWeights = &optCellToInputWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100500 }
501 optInputGateBias =
502 OverrideDataType(cLayer->m_CifgParameters.m_InputGateBias->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100503 paramsInfo.m_InputGateBias = &optInputGateBias;
telsoa01c577f2c2018-08-31 09:22:23 +0100504 }
505
506 if(descriptor.m_ProjectionEnabled)
507 {
508 optProjectionWeights =
509 OverrideDataType(cLayer->m_ProjectionParameters.m_ProjectionWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100510 paramsInfo.m_ProjectionWeights = &optProjectionWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100511 if (cLayer->m_ProjectionParameters.m_ProjectionBias != nullptr)
512 {
513 optProjectionBias =
514 OverrideDataType(cLayer->m_ProjectionParameters.m_ProjectionBias->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100515 paramsInfo.m_ProjectionBias = &optProjectionBias;
telsoa01c577f2c2018-08-31 09:22:23 +0100516 }
517 }
518
519 if(descriptor.m_PeepholeEnabled)
520 {
521 optCellToForgetWeights =
522 OverrideDataType(cLayer->m_PeepholeParameters.m_CellToForgetWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100523 paramsInfo.m_CellToForgetWeights = &optCellToForgetWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100524 optCellToOutputWeights =
525 OverrideDataType(cLayer->m_PeepholeParameters.m_CellToOutputWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100526 paramsInfo.m_CellToOutputWeights = &optCellToOutputWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100527 }
528
Jan Eilers38e05bd2019-06-26 13:10:09 +0100529 if(descriptor.m_LayerNormEnabled)
530 {
Ferran Balaguere30c16e2019-07-24 17:03:45 +0100531 if (!descriptor.m_CifgEnabled)
532 {
533 optInputLayerNormWeights = OverrideDataType(
534 cLayer->m_LayerNormParameters.m_InputLayerNormWeights->GetTensorInfo(), dataType);
535 paramsInfo.m_InputLayerNormWeights = &optInputLayerNormWeights;
536 }
Jan Eilers38e05bd2019-06-26 13:10:09 +0100537
538 optForgetLayerNormWeights = OverrideDataType(
539 cLayer->m_LayerNormParameters.m_ForgetLayerNormWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100540 paramsInfo.m_ForgetLayerNormWeights = &optForgetLayerNormWeights;
Jan Eilers38e05bd2019-06-26 13:10:09 +0100541
542 optCellLayerNormWeights = OverrideDataType(
543 cLayer->m_LayerNormParameters.m_CellLayerNormWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100544 paramsInfo.m_CellLayerNormWeights = &optCellLayerNormWeights;
Jan Eilers38e05bd2019-06-26 13:10:09 +0100545
546 optOutputLayerNormWeights = OverrideDataType(
547 cLayer->m_LayerNormParameters.m_OutputLayerNormWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100548 paramsInfo.m_OutputLayerNormWeights = &optOutputLayerNormWeights;
Jan Eilers38e05bd2019-06-26 13:10:09 +0100549 }
550
David Beck33f0ae02018-10-18 15:13:56 +0100551 result = layerSupportObject->IsLstmSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100552 input,
553 outputStateIn,
554 cellStateIn,
555 scratchBuffer,
556 outputStateOut,
557 cellStateOut,
558 output,
559 descriptor,
Jan Eilersd01a83c2019-07-03 18:20:40 +0100560 paramsInfo,
561 reason);
telsoa014fcda012018-03-09 14:13:49 +0000562 break;
563 }
Nattapat Chaimanowong5a4304a2018-11-28 10:44:37 +0000564 case LayerType::Maximum:
565 {
566 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
567 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
568 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
569
570 result = layerSupportObject->IsMaximumSupported(OverrideDataType(input0, dataType),
571 OverrideDataType(input1, dataType),
572 OverrideDataType(output, dataType),
573 reason);
574 break;
575 }
narpra01b89b05f2019-01-16 09:53:09 +0000576 case LayerType::MemCopy:
577 {
578 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
579 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Matteo Martincigh992d6dc2019-01-10 17:34:20 +0000580
narpra01b89b05f2019-01-16 09:53:09 +0000581 result = layerSupportObject->IsMemCopySupported(OverrideDataType(input, dataType),
582 OverrideDataType(output, dataType),
583 reason);
584 break;
585 }
Derek Lambertif674aa02019-08-01 15:56:25 +0100586 case LayerType::MemImport:
587 {
588 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
589 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
590
591 result = layerSupportObject->IsMemImportSupported(OverrideDataType(input, dataType),
592 OverrideDataType(output, dataType),
593 reason);
594 break;
595 }
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +0100596 case LayerType::Merge:
597 {
598 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
599 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
600 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
601
602 result = layerSupportObject->IsMergeSupported(OverrideDataType(input0, dataType),
603 OverrideDataType(input1, dataType),
604 OverrideDataType(output, dataType),
605 reason);
606 break;
607 }
Jim Flynne242f2d2019-05-22 14:24:13 +0100608 case LayerType::Concat:
telsoa014fcda012018-03-09 14:13:49 +0000609 {
Jim Flynne242f2d2019-05-22 14:24:13 +0100610 auto cLayer = boost::polymorphic_downcast<const ConcatLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000611
telsoa01c577f2c2018-08-31 09:22:23 +0100612 // Get vector of all inputs.
613 auto getTensorInfo = [&dataType](const InputSlot& slot)
telsoa014fcda012018-03-09 14:13:49 +0000614 {
telsoa01c577f2c2018-08-31 09:22:23 +0100615 return OverrideDataType(slot.GetConnectedOutputSlot()->GetTensorInfo(), dataType);
telsoa014fcda012018-03-09 14:13:49 +0000616 };
telsoa01c577f2c2018-08-31 09:22:23 +0100617 auto beginI = boost::make_transform_iterator(layer.GetInputSlots().begin(), getTensorInfo);
618 auto endI = boost::make_transform_iterator(layer.GetInputSlots().end(), getTensorInfo);
619 std::vector<TensorInfo> inputs(beginI, endI);
telsoa014fcda012018-03-09 14:13:49 +0000620
telsoa01c577f2c2018-08-31 09:22:23 +0100621 auto getTensorInfoPtr = [](const TensorInfo& info)
622 {
623 return &info;
624 };
625 auto beginPtr = boost::make_transform_iterator(inputs.begin(), getTensorInfoPtr);
626 auto endPtr = boost::make_transform_iterator(inputs.end(), getTensorInfoPtr);
627 std::vector<const TensorInfo*> inputPtrs(beginPtr, endPtr);
telsoa014fcda012018-03-09 14:13:49 +0000628
Nikhil Raj8599a412018-11-19 14:51:07 +0000629 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
630
Jim Flynne242f2d2019-05-22 14:24:13 +0100631 result = layerSupportObject->IsConcatSupported(inputPtrs, output, cLayer->GetParameters(), reason);
632
633
telsoa014fcda012018-03-09 14:13:49 +0000634 break;
635 }
636 case LayerType::Multiplication:
637 {
638 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
639 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100640 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100641 result = layerSupportObject->IsMultiplicationSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100642 OverrideDataType(input0, dataType),
643 OverrideDataType(input1, dataType),
644 OverrideDataType(output, dataType),
David Beck33f0ae02018-10-18 15:13:56 +0100645 reason);
telsoa014fcda012018-03-09 14:13:49 +0000646 break;
647 }
648 case LayerType::Normalization:
649 {
650 auto cLayer = boost::polymorphic_downcast<const NormalizationLayer*>(&layer);
651 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
652 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100653 result = layerSupportObject->IsNormalizationSupported(OverrideDataType(input, dataType),
654 OverrideDataType(output, dataType),
655 cLayer->GetParameters(),
656 reason);
telsoa014fcda012018-03-09 14:13:49 +0000657 break;
658 }
659 case LayerType::Output:
660 {
661 const TensorInfo& output = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100662 result = layerSupportObject->IsOutputSupported(OverrideDataType(output, dataType), reason);
telsoa014fcda012018-03-09 14:13:49 +0000663 break;
664 }
665 case LayerType::Permute:
666 {
667 auto cLayer = boost::polymorphic_downcast<const PermuteLayer*>(&layer);
668 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
669 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100670 result = layerSupportObject->IsPermuteSupported(OverrideDataType(input, dataType),
671 OverrideDataType(output, dataType),
672 cLayer->GetParameters(),
673 reason);
telsoa014fcda012018-03-09 14:13:49 +0000674 break;
675 }
Mohamed Nour Abouelseoud5662c202018-09-24 13:30:09 +0100676 case LayerType::Pad:
677 {
678 auto cLayer = boost::polymorphic_downcast<const PadLayer*>(&layer);
679 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
680 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100681 result = layerSupportObject->IsPadSupported(
Mohamed Nour Abouelseoud5662c202018-09-24 13:30:09 +0100682 OverrideDataType(input, dataType),
683 OverrideDataType(output, dataType),
684 cLayer->GetParameters(),
David Beck33f0ae02018-10-18 15:13:56 +0100685 reason);
Mohamed Nour Abouelseoud5662c202018-09-24 13:30:09 +0100686 break;
687 }
telsoa014fcda012018-03-09 14:13:49 +0000688 case LayerType::Pooling2d:
689 {
690 auto cLayer = boost::polymorphic_downcast<const Pooling2dLayer*>(&layer);
691 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
692 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100693 result = layerSupportObject->IsPooling2dSupported(OverrideDataType(input, dataType),
694 OverrideDataType(output, dataType),
695 cLayer->GetParameters(),
696 reason);
telsoa014fcda012018-03-09 14:13:49 +0000697 break;
698 }
Matteo Martincigh49124022019-01-11 13:25:59 +0000699 case LayerType::PreCompiled:
700 {
701 auto cLayer = boost::polymorphic_downcast<const PreCompiledLayer*>(&layer);
702 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
703 result = layerSupportObject->IsPreCompiledSupported(OverrideDataType(input, dataType),
704 cLayer->GetParameters(),
705 reason);
706 break;
707 }
Derek Lambertia9cca6a2019-03-25 15:41:58 +0000708 case LayerType::Quantize:
709 {
710 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
711 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
712 result = layerSupportObject->IsQuantizeSupported(input, output, reason);
713 break;
714 }
James Conroyee18dc82019-07-17 11:27:46 +0100715 case LayerType::QuantizedLstm:
716 {
717 auto cLayer = boost::polymorphic_downcast<const QuantizedLstmLayer*>(&layer);
718
719 // Inputs
Ferran Balaguer737d9ff2019-08-01 09:58:08 +0100720 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
721 const TensorInfo& previousCellStateIn = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
722 const TensorInfo& previousOutputIn = layer.GetInputSlot(2).GetConnection()->GetTensorInfo();
James Conroyee18dc82019-07-17 11:27:46 +0100723
724 // Outputs
Ferran Balaguer737d9ff2019-08-01 09:58:08 +0100725 const TensorInfo& cellStateOut = layer.GetOutputSlot(0).GetTensorInfo();
726 const TensorInfo& output = layer.GetOutputSlot(1).GetTensorInfo();
James Conroyee18dc82019-07-17 11:27:46 +0100727
728 // QuantizedLstm parameters
James Conroyee18dc82019-07-17 11:27:46 +0100729 QuantizedLstmInputParamsInfo paramsInfo;
730
Ferran Balaguer737d9ff2019-08-01 09:58:08 +0100731 paramsInfo.m_InputToInputWeights =
732 &cLayer->m_QuantizedLstmParameters.m_InputToInputWeights->GetTensorInfo();
733 paramsInfo.m_InputToForgetWeights =
734 &cLayer->m_QuantizedLstmParameters.m_InputToForgetWeights->GetTensorInfo();
735 paramsInfo.m_InputToCellWeights =
736 &cLayer->m_QuantizedLstmParameters.m_InputToCellWeights->GetTensorInfo();
737 paramsInfo.m_InputToOutputWeights =
738 &cLayer->m_QuantizedLstmParameters.m_InputToOutputWeights->GetTensorInfo();
James Conroyee18dc82019-07-17 11:27:46 +0100739
Ferran Balaguer737d9ff2019-08-01 09:58:08 +0100740 paramsInfo.m_RecurrentToInputWeights =
741 &cLayer->m_QuantizedLstmParameters.m_RecurrentToInputWeights->GetTensorInfo();
742 paramsInfo.m_RecurrentToForgetWeights =
743 &cLayer->m_QuantizedLstmParameters.m_RecurrentToForgetWeights->GetTensorInfo();
744 paramsInfo.m_RecurrentToCellWeights =
745 &cLayer->m_QuantizedLstmParameters.m_RecurrentToCellWeights->GetTensorInfo();
746 paramsInfo.m_RecurrentToOutputWeights =
747 &cLayer->m_QuantizedLstmParameters.m_RecurrentToOutputWeights->GetTensorInfo();
James Conroyee18dc82019-07-17 11:27:46 +0100748
Ferran Balaguer737d9ff2019-08-01 09:58:08 +0100749 paramsInfo.m_InputGateBias =
750 &cLayer->m_QuantizedLstmParameters.m_InputGateBias->GetTensorInfo();
751 paramsInfo.m_ForgetGateBias =
752 &cLayer->m_QuantizedLstmParameters.m_ForgetGateBias->GetTensorInfo();
753 paramsInfo.m_CellBias =
754 &cLayer->m_QuantizedLstmParameters.m_CellBias->GetTensorInfo();
755 paramsInfo.m_OutputGateBias =
756 &cLayer->m_QuantizedLstmParameters.m_OutputGateBias->GetTensorInfo();;
James Conroyee18dc82019-07-17 11:27:46 +0100757
758 result = layerSupportObject->IsQuantizedLstmSupported(input,
759 previousCellStateIn,
760 previousOutputIn,
761 cellStateOut,
762 output,
763 paramsInfo,
764 reason);
765 break;
766 }
Francis Murtaghe7a86a42018-08-29 12:42:10 +0100767 case LayerType::Division:
768 {
769 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
770 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
771 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100772 result = layerSupportObject->IsDivisionSupported(
Francis Murtaghe7a86a42018-08-29 12:42:10 +0100773 OverrideDataType(input0, dataType),
774 OverrideDataType(input1, dataType),
775 OverrideDataType(output, dataType),
David Beck33f0ae02018-10-18 15:13:56 +0100776 reason);
Francis Murtaghe7a86a42018-08-29 12:42:10 +0100777 break;
778 }
telsoa014fcda012018-03-09 14:13:49 +0000779 case LayerType::Reshape:
780 {
Matteo Martincigh992d6dc2019-01-10 17:34:20 +0000781 auto cLayer = boost::polymorphic_downcast<const ReshapeLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000782 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
Matteo Martincigh992d6dc2019-01-10 17:34:20 +0000783 result = layerSupportObject->IsReshapeSupported(OverrideDataType(input, dataType),
784 cLayer->GetParameters(),
785 reason);
telsoa014fcda012018-03-09 14:13:49 +0000786 break;
787 }
Teresa Charlina9075df2019-06-27 15:41:57 +0100788 case LayerType::Resize:
789 {
790 auto cLayer = boost::polymorphic_downcast<const ResizeLayer*>(&layer);
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +0100791 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
Teresa Charlina9075df2019-06-27 15:41:57 +0100792 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
793 result = layerSupportObject->IsResizeSupported(OverrideDataType(input, dataType),
794 OverrideDataType(output, dataType),
795 cLayer->GetParameters(),
796 reason);
797 break;
798 }
Mohamed Nour Abouelseouda1d3c6a2018-12-27 12:39:16 +0000799 case LayerType::Rsqrt:
800 {
801 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
802 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
803 result = layerSupportObject->IsRsqrtSupported(OverrideDataType(input, dataType),
804 OverrideDataType(output, dataType),
805 reason);
806 break;
807 }
Aron Virginas-Tar636ab402019-09-16 14:27:45 +0100808 case LayerType::Slice:
809 {
810 auto cLayer = boost::polymorphic_downcast<const SliceLayer*>(&layer);
811
812 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
813 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
814
815 result = layerSupportObject->IsSliceSupported(OverrideDataType(input, dataType),
816 OverrideDataType(output, dataType),
817 cLayer->GetParameters(),
818 reason);
819 break;
820 }
telsoa014fcda012018-03-09 14:13:49 +0000821 case LayerType::Softmax:
822 {
823 auto cLayer = boost::polymorphic_downcast<const SoftmaxLayer*>(&layer);
824 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100825 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100826 result = layerSupportObject->IsSoftmaxSupported(OverrideDataType(input, dataType),
827 OverrideDataType(output, dataType),
828 cLayer->GetParameters(),
829 reason);
telsoa014fcda012018-03-09 14:13:49 +0000830 break;
831 }
Nattapat Chaimanowong207ef9a2018-11-02 10:57:25 +0000832 case LayerType::SpaceToBatchNd:
833 {
834 auto cLayer = boost::polymorphic_downcast<const SpaceToBatchNdLayer*>(&layer);
835 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
836 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
837 result = layerSupportObject->IsSpaceToBatchNdSupported(OverrideDataType(input, dataType),
838 OverrideDataType(output, dataType),
839 cLayer->GetParameters(),
840 reason);
841 break;
842 }
Aron Virginas-Tar972af152019-06-11 14:14:03 +0100843 case LayerType::SpaceToDepth:
844 {
845 auto cLayer = boost::polymorphic_downcast<const SpaceToDepthLayer*>(&layer);
846
847 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
848 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
849
850 result = layerSupportObject->IsSpaceToDepthSupported(OverrideDataType(input, dataType),
851 OverrideDataType(output, dataType),
852 cLayer->GetParameters(),
853 reason);
854 break;
855 }
telsoa014fcda012018-03-09 14:13:49 +0000856 case LayerType::Splitter:
857 {
858 auto cLayer = boost::polymorphic_downcast<const SplitterLayer*>(&layer);
859 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
Narumol Prangnawarat15eb5832019-05-20 15:31:05 +0100860
861 // Get vector of all outputs.
862 auto getTensorInfo = [&dataType](const OutputSlot& slot)
863 {
864 return OverrideDataType(slot.GetTensorInfo(), dataType);
865 };
866 auto beginI = boost::make_transform_iterator(layer.GetOutputSlots().begin(), getTensorInfo);
867 auto endI = boost::make_transform_iterator(layer.GetOutputSlots().end(), getTensorInfo);
868 std::vector<TensorInfo> outputs(beginI, endI);
869
870 const std::vector<std::reference_wrapper<TensorInfo>> outputPtrs(outputs.begin(), outputs.end());
871
David Beck33f0ae02018-10-18 15:13:56 +0100872 result = layerSupportObject->IsSplitterSupported(OverrideDataType(input, dataType),
Narumol Prangnawarat15eb5832019-05-20 15:31:05 +0100873 outputPtrs,
David Beck33f0ae02018-10-18 15:13:56 +0100874 cLayer->GetParameters(),
875 reason);
telsoa014fcda012018-03-09 14:13:49 +0000876 break;
877 }
Matthew Jackson2b8c1da2019-07-04 14:59:16 +0100878 case LayerType::Stack:
879 {
880 auto cLayer = boost::polymorphic_downcast<const StackLayer*>(&layer);
881
882 // Get vector of all inputs.
883 auto getTensorInfo = [&dataType](const InputSlot& slot)
884 {
885 return OverrideDataType(slot.GetConnectedOutputSlot()->GetTensorInfo(), dataType);
886 };
887 auto beginI = boost::make_transform_iterator(layer.GetInputSlots().begin(), getTensorInfo);
888 auto endI = boost::make_transform_iterator(layer.GetInputSlots().end(), getTensorInfo);
889 std::vector<TensorInfo> inputs(beginI, endI);
890
891 auto getTensorInfoPtr = [](const TensorInfo& info)
892 {
893 return &info;
894 };
895 auto beginPtr = boost::make_transform_iterator(inputs.begin(), getTensorInfoPtr);
896 auto endPtr = boost::make_transform_iterator(inputs.end(), getTensorInfoPtr);
897 std::vector<const TensorInfo*> inputPtrs(beginPtr, endPtr);
898
899 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
900
901 result = layerSupportObject->IsStackSupported(inputPtrs, output, cLayer->GetParameters(), reason);
902
903 break;
904 }
Conor Kennedy430b5d82018-11-14 15:28:28 +0000905 case LayerType::StridedSlice:
906 {
907 auto cLayer = boost::polymorphic_downcast<const StridedSliceLayer*>(&layer);
908 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
909 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
910 result = layerSupportObject->IsStridedSliceSupported(OverrideDataType(input, dataType),
911 OverrideDataType(output, dataType),
912 cLayer->GetParameters(),
913 reason);
914 break;
915 }
David Beckc2044fe2018-09-05 15:00:38 +0100916 case LayerType::Subtraction:
917 {
918 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
919 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
920 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100921 result = layerSupportObject->IsSubtractionSupported(
David Beckc2044fe2018-09-05 15:00:38 +0100922 OverrideDataType(input0, dataType),
923 OverrideDataType(input1, dataType),
924 OverrideDataType(output, dataType),
David Beck33f0ae02018-10-18 15:13:56 +0100925 reason);
David Beckc2044fe2018-09-05 15:00:38 +0100926 break;
927 }
Sadik Armaganeff363d2019-04-05 15:25:46 +0100928 case LayerType::Switch:
929 {
930 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
931 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
932 const TensorInfo& output0 = layer.GetOutputSlot(0).GetTensorInfo();
933 const TensorInfo& output1 = layer.GetOutputSlot(1).GetTensorInfo();
934 result = layerSupportObject->IsSwitchSupported(OverrideDataType(input0, dataType),
935 OverrideDataType(input1, dataType),
936 OverrideDataType(output0, dataType),
937 OverrideDataType(output1, dataType),
938 reason);
939 break;
940 }
narpra0132b90462018-09-13 11:07:48 +0100941 case LayerType::Mean:
942 {
943 auto cLayer = boost::polymorphic_downcast<const MeanLayer*>(&layer);
944 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
945 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100946 result = layerSupportObject->IsMeanSupported(
narpra0132b90462018-09-13 11:07:48 +0100947 OverrideDataType(input, dataType),
948 OverrideDataType(output, dataType),
949 cLayer->GetParameters(),
David Beck33f0ae02018-10-18 15:13:56 +0100950 reason);
narpra0132b90462018-09-13 11:07:48 +0100951 break;
952 }
kevmay0190539692018-11-29 08:40:19 +0000953 case LayerType::Minimum:
954 {
955 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
956 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
957 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
958 result = layerSupportObject->IsMinimumSupported(OverrideDataType(input0, dataType),
959 OverrideDataType(input1, dataType),
960 OverrideDataType(output, dataType),
961 reason);
962 break;
963 }
Matteo Martincigh0e406ee2019-06-12 15:42:18 +0100964 case LayerType::Prelu:
965 {
966 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
967 const TensorInfo& alpha = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
968 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
969 result = layerSupportObject->IsPreluSupported(OverrideDataType(input, dataType),
970 OverrideDataType(alpha, dataType),
971 OverrideDataType(output, dataType),
972 reason);
973 break;
974 }
Aron Virginas-Tar639fb042019-06-20 14:28:19 +0100975 case LayerType::TransposeConvolution2d:
976 {
977 auto cLayer = boost::polymorphic_downcast<const TransposeConvolution2dLayer*>(&layer);
978
979 const TensorInfo input = OverrideDataType(layer.GetInputSlot(0).GetConnection()->GetTensorInfo(),
980 dataType);
981 const TensorInfo output = OverrideDataType(layer.GetOutputSlot(0).GetTensorInfo(), dataType);
982
983 const TransposeConvolution2dDescriptor& descriptor = cLayer->GetParameters();
984
985 Optional<TensorInfo> biases;
986 if (descriptor.m_BiasEnabled)
987 {
988 BOOST_ASSERT(cLayer->m_Bias.get() != nullptr);
989 biases = OverrideDataType(cLayer->m_Bias->GetTensorInfo(),
990 GetBiasTypeFromWeightsType(dataType));
991 }
992
993 BOOST_ASSERT(cLayer->m_Weight.get() != nullptr);
994 const TensorInfo weights = OverrideDataType(cLayer->m_Weight->GetTensorInfo(), dataType);
995
996 result = layerSupportObject->IsTransposeConvolution2dSupported(input,
997 output,
998 descriptor,
999 weights,
1000 biases,
1001 reason);
1002
1003 break;
1004 }
telsoa014fcda012018-03-09 14:13:49 +00001005 default:
1006 {
1007 BOOST_ASSERT_MSG(false, "WorkloadFactory did not recognise type of layer.");
David Beck33f0ae02018-10-18 15:13:56 +01001008 reason.value() = "Unrecognised layer type";
telsoa014fcda012018-03-09 14:13:49 +00001009 result = false;
1010 break;
1011 }
1012 }
telsoa014fcda012018-03-09 14:13:49 +00001013 return result;
1014}
1015
David Beckdcb751f2018-10-03 11:42:42 +01001016bool IWorkloadFactory::IsLayerSupported(const IConnectableLayer& connectableLayer,
David Beck29c75de2018-10-23 13:35:58 +01001017 Optional<DataType> dataType,
telsoa01c577f2c2018-08-31 09:22:23 +01001018 std::string& outReasonIfUnsupported)
telsoa014fcda012018-03-09 14:13:49 +00001019{
David Beckdcb751f2018-10-03 11:42:42 +01001020 auto layer = boost::polymorphic_downcast<const Layer*>(&connectableLayer);
David Beck33f0ae02018-10-18 15:13:56 +01001021 return IsLayerSupported(layer->GetBackendId(), connectableLayer, dataType, outReasonIfUnsupported);
telsoa014fcda012018-03-09 14:13:49 +00001022}
1023
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001024// Default Implementations
Kevin May868eb142019-09-04 17:29:31 +01001025std::unique_ptr<IWorkload> IWorkloadFactory::CreateAbs(const AbsQueueDescriptor& descriptor,
1026 const WorkloadInfo& info) const
1027{
1028 return std::unique_ptr<IWorkload>();
1029}
1030
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001031std::unique_ptr<IWorkload> IWorkloadFactory::CreateActivation(const ActivationQueueDescriptor& descriptor,
1032 const WorkloadInfo& info) const
1033{
1034 return std::unique_ptr<IWorkload>();
1035}
1036
1037std::unique_ptr<IWorkload> IWorkloadFactory::CreateAddition(const AdditionQueueDescriptor& descriptor,
1038 const WorkloadInfo& info) const
1039{
1040 return std::unique_ptr<IWorkload>();
1041}
1042
Nikhil Rajee391d52019-09-05 17:50:44 +01001043std::unique_ptr<IWorkload> IWorkloadFactory::CreateArgMinMax(const ArgMinMaxQueueDescriptor& descriptor,
1044 const WorkloadInfo& info) const
1045{
1046 return std::unique_ptr<IWorkload>();
1047}
1048
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001049std::unique_ptr<IWorkload> IWorkloadFactory::CreateBatchNormalization(
1050 const BatchNormalizationQueueDescriptor& descriptor, const WorkloadInfo& info) const
1051{
1052 return std::unique_ptr<IWorkload>();
1053}
1054
1055std::unique_ptr<IWorkload> IWorkloadFactory::CreateBatchToSpaceNd(const BatchToSpaceNdQueueDescriptor& descriptor,
1056 const WorkloadInfo& Info) const
1057{
1058 return std::unique_ptr<IWorkload>();
1059}
1060
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001061std::unique_ptr<IWorkload> IWorkloadFactory::CreateComparison(const ComparisonQueueDescriptor& descriptor,
1062 const WorkloadInfo& info) const
1063{
1064 return std::unique_ptr<IWorkload>();
1065}
1066
Jim Flynne242f2d2019-05-22 14:24:13 +01001067std::unique_ptr<IWorkload> IWorkloadFactory::CreateConcat(const ConcatQueueDescriptor& descriptor,
Jim Flynn4ed6c832019-05-20 11:02:46 +01001068 const WorkloadInfo& info) const
1069{
1070 return std::unique_ptr<IWorkload>();
1071}
1072
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001073std::unique_ptr<IWorkload> IWorkloadFactory::CreateConstant(const ConstantQueueDescriptor& descriptor,
1074 const WorkloadInfo& info) const
1075{
1076 return std::unique_ptr<IWorkload>();
1077}
1078
1079std::unique_ptr<IWorkload> IWorkloadFactory::CreateConvertFp16ToFp32(const ConvertFp16ToFp32QueueDescriptor& descriptor,
1080 const WorkloadInfo& info) const
1081{
1082 return std::unique_ptr<IWorkload>();
1083}
1084
1085std::unique_ptr<IWorkload> IWorkloadFactory::CreateConvertFp32ToFp16(const ConvertFp32ToFp16QueueDescriptor& descriptor,
1086 const WorkloadInfo& info) const
1087{
1088 return std::unique_ptr<IWorkload>();
1089}
1090
1091std::unique_ptr<IWorkload> IWorkloadFactory::CreateConvolution2d(const Convolution2dQueueDescriptor& descriptor,
1092 const WorkloadInfo& info) const
1093{
1094 return std::unique_ptr<IWorkload>();
1095}
1096
1097std::unique_ptr<IWorkload> IWorkloadFactory::CreateDebug(const DebugQueueDescriptor& descriptor,
1098 const WorkloadInfo& info) const
1099{
1100 return std::unique_ptr<IWorkload>();
1101}
1102
Aron Virginas-Tardd6247f2019-09-19 14:31:17 +01001103std::unique_ptr<IWorkload> IWorkloadFactory::CreateDepthToSpace(const DepthToSpaceQueueDescriptor& descriptor,
1104 const WorkloadInfo& info) const
1105{
1106 return std::unique_ptr<IWorkload>();
1107}
1108
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001109std::unique_ptr<IWorkload> IWorkloadFactory::CreateDepthwiseConvolution2d(
1110 const DepthwiseConvolution2dQueueDescriptor& descriptor, const WorkloadInfo& info) const
1111{
1112 return std::unique_ptr<IWorkload>();
1113}
1114
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00001115std::unique_ptr<IWorkload> IWorkloadFactory::CreateDequantize(
1116 const DequantizeQueueDescriptor& descriptor, const WorkloadInfo& info) const
1117{
1118 return std::unique_ptr<IWorkload>();
1119}
1120
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001121std::unique_ptr<IWorkload> IWorkloadFactory::CreateDetectionPostProcess(
1122 const DetectionPostProcessQueueDescriptor& descriptor, const WorkloadInfo& info) const
1123{
1124 return std::unique_ptr<IWorkload>();
1125}
1126
1127std::unique_ptr<IWorkload> IWorkloadFactory::CreateDivision(const DivisionQueueDescriptor& descriptor,
1128 const WorkloadInfo& info) const
1129{
1130 return std::unique_ptr<IWorkload>();
1131}
1132
1133std::unique_ptr<IWorkload> IWorkloadFactory::CreateEqual(const EqualQueueDescriptor& descriptor,
1134 const WorkloadInfo& Info) const
1135{
1136 return std::unique_ptr<IWorkload>();
1137}
1138
1139std::unique_ptr<IWorkload> IWorkloadFactory::CreateFakeQuantization(const FakeQuantizationQueueDescriptor& descriptor,
1140 const WorkloadInfo& info) const
1141{
1142 return std::unique_ptr<IWorkload>();
1143}
1144
1145std::unique_ptr<IWorkload> IWorkloadFactory::CreateFloor(const FloorQueueDescriptor& descriptor,
1146 const WorkloadInfo& info) const
1147{
1148 return std::unique_ptr<IWorkload>();
1149}
1150
1151std::unique_ptr<IWorkload> IWorkloadFactory::CreateFullyConnected(const FullyConnectedQueueDescriptor& descriptor,
1152 const WorkloadInfo& info) const
1153{
1154 return std::unique_ptr<IWorkload>();
1155}
1156
1157std::unique_ptr<IWorkload> IWorkloadFactory::CreateGather(const GatherQueueDescriptor& descriptor,
1158 const WorkloadInfo& info) const
1159{
1160 return std::unique_ptr<IWorkload>();
1161}
1162
1163std::unique_ptr<IWorkload> IWorkloadFactory::CreateGreater(const GreaterQueueDescriptor& descriptor,
1164 const WorkloadInfo& info) const
1165{
1166 return std::unique_ptr<IWorkload>();
1167}
1168
Kevin Mayce5045a2019-10-02 14:07:47 +01001169std::unique_ptr<IWorkload> IWorkloadFactory::CreateInstanceNormalization(
1170 const InstanceNormalizationQueueDescriptor& descriptor,
1171 const WorkloadInfo& info) const
1172{
1173 return std::unique_ptr<IWorkload>();
1174}
1175
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001176std::unique_ptr<IWorkload> IWorkloadFactory::CreateL2Normalization(const L2NormalizationQueueDescriptor& descriptor,
1177 const WorkloadInfo& info) const
1178{
1179 return std::unique_ptr<IWorkload>();
1180}
1181
Aron Virginas-Tarf982dea2019-10-11 14:07:53 +01001182std::unique_ptr<IWorkload> IWorkloadFactory::CreateLogSoftmax(const LogSoftmaxQueueDescriptor& descriptor,
1183 const WorkloadInfo& info) const
1184{
1185 return std::unique_ptr<IWorkload>();
1186}
1187
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001188std::unique_ptr<IWorkload> IWorkloadFactory::CreateLstm(const LstmQueueDescriptor& descriptor,
1189 const WorkloadInfo& info) const
1190{
1191 return std::unique_ptr<IWorkload>();
1192}
1193
1194std::unique_ptr<IWorkload> IWorkloadFactory::CreateMaximum(const MaximumQueueDescriptor& descriptor,
1195 const WorkloadInfo& info) const
1196{
1197 return std::unique_ptr<IWorkload>();
1198}
1199
1200std::unique_ptr<IWorkload> IWorkloadFactory::CreateMean(const MeanQueueDescriptor& descriptor,
1201 const WorkloadInfo& Info) const
1202{
1203 return std::unique_ptr<IWorkload>();
1204}
1205
1206std::unique_ptr<IWorkload> IWorkloadFactory::CreateMemCopy(const MemCopyQueueDescriptor& descriptor,
1207 const WorkloadInfo& info) const
1208{
1209 return std::unique_ptr<IWorkload>();
1210}
1211
Derek Lambertif674aa02019-08-01 15:56:25 +01001212std::unique_ptr<IWorkload> IWorkloadFactory::CreateMemImport(const MemImportQueueDescriptor& descriptor,
1213 const WorkloadInfo& info) const
1214{
1215 return std::unique_ptr<IWorkload>();
1216}
1217
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01001218std::unique_ptr<IWorkload> IWorkloadFactory::CreateMerge(const MergeQueueDescriptor& descriptor,
1219 const WorkloadInfo& info) const
1220{
1221 return std::unique_ptr<IWorkload>();
1222}
1223
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001224std::unique_ptr<IWorkload> IWorkloadFactory::CreateMerger(const MergerQueueDescriptor& descriptor,
1225 const WorkloadInfo& info) const
1226{
1227 return std::unique_ptr<IWorkload>();
1228}
1229
1230std::unique_ptr<IWorkload> IWorkloadFactory::CreateMinimum(const MinimumQueueDescriptor& descriptor,
1231 const WorkloadInfo& info) const
1232{
1233 return std::unique_ptr<IWorkload>();
1234}
1235
1236std::unique_ptr<IWorkload> IWorkloadFactory::CreateMultiplication(const MultiplicationQueueDescriptor& descriptor,
1237 const WorkloadInfo& info) const
1238{
1239 return std::unique_ptr<IWorkload>();
1240}
1241
1242std::unique_ptr<IWorkload> IWorkloadFactory::CreateNormalization(const NormalizationQueueDescriptor& descriptor,
1243 const WorkloadInfo& info) const
1244{
1245 return std::unique_ptr<IWorkload>();
1246}
1247
1248std::unique_ptr<IWorkload> IWorkloadFactory::CreateOutput(const OutputQueueDescriptor& descriptor,
1249 const WorkloadInfo& info) const
1250{
1251 return std::unique_ptr<IWorkload>();
1252}
1253
1254std::unique_ptr<IWorkload> IWorkloadFactory::CreatePad(const PadQueueDescriptor& descriptor,
1255 const WorkloadInfo& Info) const
1256{
1257 return std::unique_ptr<IWorkload>();
1258}
1259
1260std::unique_ptr<IWorkload> IWorkloadFactory::CreatePermute(const PermuteQueueDescriptor& descriptor,
1261 const WorkloadInfo& info) const
1262{
1263 return std::unique_ptr<IWorkload>();
1264}
1265
1266std::unique_ptr<IWorkload> IWorkloadFactory::CreatePooling2d(const Pooling2dQueueDescriptor& descriptor,
1267 const WorkloadInfo& info) const
1268{
1269 return std::unique_ptr<IWorkload>();
1270}
1271
1272std::unique_ptr<IWorkload> IWorkloadFactory::CreatePreCompiled(const PreCompiledQueueDescriptor& descriptor,
1273 const WorkloadInfo& info) const
1274{
1275 return std::unique_ptr<IWorkload>();
1276}
1277
Matteo Martincigh0e406ee2019-06-12 15:42:18 +01001278std::unique_ptr<IWorkload> IWorkloadFactory::CreatePrelu(const PreluQueueDescriptor &descriptor,
1279 const WorkloadInfo &info) const
1280{
1281 return std::unique_ptr<IWorkload>();
1282}
1283
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001284std::unique_ptr<IWorkload> IWorkloadFactory::CreateQuantize(const QuantizeQueueDescriptor& descriptor,
1285 const WorkloadInfo& Info) const
1286{
1287 return std::unique_ptr<IWorkload>();
1288}
1289
James Conroyee18dc82019-07-17 11:27:46 +01001290std::unique_ptr<IWorkload> IWorkloadFactory::CreateQuantizedLstm(const QuantizedLstmQueueDescriptor& descriptor,
1291 const WorkloadInfo& info) const
1292{
1293 return std::unique_ptr<IWorkload>();
1294}
1295
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001296std::unique_ptr<IWorkload> IWorkloadFactory::CreateReshape(const ReshapeQueueDescriptor& descriptor,
1297 const WorkloadInfo& info) const
1298{
1299 return std::unique_ptr<IWorkload>();
1300}
1301
1302std::unique_ptr<IWorkload> IWorkloadFactory::CreateResizeBilinear(const ResizeBilinearQueueDescriptor& descriptor,
1303 const WorkloadInfo& info) const
1304{
1305 return std::unique_ptr<IWorkload>();
1306}
1307
Teresa Charlina9075df2019-06-27 15:41:57 +01001308std::unique_ptr<IWorkload> IWorkloadFactory::CreateResize(const ResizeQueueDescriptor& descriptor,
1309 const WorkloadInfo& info) const
1310{
1311 return std::unique_ptr<IWorkload>();
1312}
1313
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001314std::unique_ptr<IWorkload> IWorkloadFactory::CreateRsqrt(const RsqrtQueueDescriptor& descriptor,
1315 const WorkloadInfo& info) const
1316{
1317 return std::unique_ptr<IWorkload>();
1318}
1319
Aron Virginas-Tar636ab402019-09-16 14:27:45 +01001320std::unique_ptr<IWorkload> IWorkloadFactory::CreateSlice(const SliceQueueDescriptor& descriptor,
1321 const WorkloadInfo& info) const
1322{
1323 return std::unique_ptr<IWorkload>();
1324}
1325
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001326std::unique_ptr<IWorkload> IWorkloadFactory::CreateSoftmax(const SoftmaxQueueDescriptor& descriptor,
1327 const WorkloadInfo& info) const
1328{
1329 return std::unique_ptr<IWorkload>();
1330}
1331
1332std::unique_ptr<IWorkload> IWorkloadFactory::CreateSplitter(const SplitterQueueDescriptor& descriptor,
1333 const WorkloadInfo& info) const
1334{
1335 return std::unique_ptr<IWorkload>();
1336}
1337
1338std::unique_ptr<IWorkload> IWorkloadFactory::CreateSpaceToBatchNd(const SpaceToBatchNdQueueDescriptor& descriptor,
1339 const WorkloadInfo& info) const
1340{
1341 return std::unique_ptr<IWorkload>();
1342}
1343
Aron Virginas-Tar972af152019-06-11 14:14:03 +01001344std::unique_ptr<IWorkload> IWorkloadFactory::CreateSpaceToDepth(const SpaceToDepthQueueDescriptor& descriptor,
1345 const WorkloadInfo& info) const
1346{
1347 return std::unique_ptr<IWorkload>();
1348}
1349
Matthew Jackson2b8c1da2019-07-04 14:59:16 +01001350std::unique_ptr<IWorkload> IWorkloadFactory::CreateStack(const StackQueueDescriptor& descriptor,
1351 const WorkloadInfo& info) const
1352{
1353 return std::unique_ptr<IWorkload>();
1354}
1355
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001356std::unique_ptr<IWorkload> IWorkloadFactory::CreateStridedSlice(const StridedSliceQueueDescriptor& descriptor,
1357 const WorkloadInfo& Info) const
1358{
1359 return std::unique_ptr<IWorkload>();
1360}
1361
1362std::unique_ptr<IWorkload> IWorkloadFactory::CreateSubtraction(const SubtractionQueueDescriptor& descriptor,
1363 const WorkloadInfo& info) const
1364{
1365 return std::unique_ptr<IWorkload>();
1366}
1367
Sadik Armaganeff363d2019-04-05 15:25:46 +01001368std::unique_ptr<IWorkload> IWorkloadFactory::CreateSwitch(const SwitchQueueDescriptor& descriptor,
1369 const WorkloadInfo& info) const
1370{
1371 return std::unique_ptr<IWorkload>();
1372}
1373
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01001374std::unique_ptr<IWorkload> IWorkloadFactory::CreateTransposeConvolution2d(
1375 const TransposeConvolution2dQueueDescriptor& descriptor,
1376 const WorkloadInfo& info) const
1377{
1378 return std::unique_ptr<IWorkload>();
surmeh013537c2c2018-05-18 16:31:43 +01001379}
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01001380
} // namespace armnn