blob: 34e4cbe579816fcde1741c3b731a84b1ca565bd6 [file] [log] [blame]
telsoa014fcda012018-03-09 14:13:49 +00001//
2// Copyright © 2017 Arm Ltd. All rights reserved.
David Beckecb56cd2018-09-05 12:52:57 +01003// SPDX-License-Identifier: MIT
telsoa014fcda012018-03-09 14:13:49 +00004//
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +00005
6#include "CpuTensorHandle.hpp"
Derek Lambertia9cca6a2019-03-25 15:41:58 +00007#include "WorkloadFactory.hpp"
8
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +00009
10#include <Layer.hpp>
11#include <LayersFwd.hpp>
David Beckdcb751f2018-10-03 11:42:42 +010012
David Beckb4540be2018-09-24 13:18:27 +010013#include <armnn/Types.hpp>
14#include <armnn/LayerSupport.hpp>
David Beck111b5d92018-11-12 14:59:37 +000015#include <armnn/ILayerSupport.hpp>
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +000016
David Beck111b5d92018-11-12 14:59:37 +000017#include <backendsCommon/BackendRegistry.hpp>
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +000018#include <backendsCommon/WorkloadFactory.hpp>
David Beck111b5d92018-11-12 14:59:37 +000019#include <backendsCommon/IBackendInternal.hpp>
Francis Murtagh46c09d02019-05-28 08:15:28 +010020#include <backendsCommon/test/WorkloadTestUtils.hpp>
telsoa014fcda012018-03-09 14:13:49 +000021
22#include <boost/cast.hpp>
telsoa014fcda012018-03-09 14:13:49 +000023#include <boost/iterator/transform_iterator.hpp>
24
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +000025#include <cstring>
David Beck111b5d92018-11-12 14:59:37 +000026#include <sstream>
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +000027
telsoa014fcda012018-03-09 14:13:49 +000028namespace armnn
29{
30
telsoa01c577f2c2018-08-31 09:22:23 +010031namespace
32{
telsoa01c577f2c2018-08-31 09:22:23 +010033
David Beck29c75de2018-10-23 13:35:58 +010034const TensorInfo OverrideDataType(const TensorInfo& info, Optional<DataType> type)
35{
36 if (!type)
37 {
38 return info;
telsoa01c577f2c2018-08-31 09:22:23 +010039 }
40
David Beck29c75de2018-10-23 13:35:58 +010041 return TensorInfo(info.GetShape(), type.value(), info.GetQuantizationScale(), info.GetQuantizationOffset());
telsoa01c577f2c2018-08-31 09:22:23 +010042}
43
David Beck29c75de2018-10-23 13:35:58 +010044} // anonymous namespace
45
David Beck33f0ae02018-10-18 15:13:56 +010046bool IWorkloadFactory::IsLayerSupported(const BackendId& backendId,
David Beckdcb751f2018-10-03 11:42:42 +010047 const IConnectableLayer& connectableLayer,
David Beck29c75de2018-10-23 13:35:58 +010048 Optional<DataType> dataType,
David Beckdcb751f2018-10-03 11:42:42 +010049 std::string& outReasonIfUnsupported)
telsoa014fcda012018-03-09 14:13:49 +000050{
David Beck33f0ae02018-10-18 15:13:56 +010051 Optional<std::string&> reason = outReasonIfUnsupported;
telsoa014fcda012018-03-09 14:13:49 +000052 bool result;
David Beckdcb751f2018-10-03 11:42:42 +010053 const Layer& layer = *(boost::polymorphic_downcast<const Layer*>(&connectableLayer));
54
David Beck111b5d92018-11-12 14:59:37 +000055 auto const& backendRegistry = BackendRegistryInstance();
56 if (!backendRegistry.IsBackendRegistered(backendId))
57 {
58 std::stringstream ss;
59 ss << connectableLayer.GetName() << " is not supported on " << backendId
60 << " because this backend is not registered.";
61
62 outReasonIfUnsupported = ss.str();
63 return false;
64 }
65
66 auto backendFactory = backendRegistry.GetFactory(backendId);
67 auto backendObject = backendFactory();
68 auto layerSupportObject = backendObject->GetLayerSupport();
David Beck33f0ae02018-10-18 15:13:56 +010069
telsoa014fcda012018-03-09 14:13:49 +000070 switch(layer.GetType())
71 {
Kevin May868eb142019-09-04 17:29:31 +010072 case LayerType::Abs:
73 {
74 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
75 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
76 result = layerSupportObject->IsAbsSupported(OverrideDataType(input, dataType),
77 OverrideDataType(output, dataType),
78 reason);
79 break;
80 }
telsoa014fcda012018-03-09 14:13:49 +000081 case LayerType::Activation:
82 {
83 auto cLayer = boost::polymorphic_downcast<const ActivationLayer*>(&layer);
84 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +010085 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +010086 result = layerSupportObject->IsActivationSupported(
telsoa01c577f2c2018-08-31 09:22:23 +010087 OverrideDataType(input, dataType),
88 OverrideDataType(output, dataType),
89 cLayer->GetParameters(),
David Beck33f0ae02018-10-18 15:13:56 +010090 reason);
telsoa014fcda012018-03-09 14:13:49 +000091 break;
92 }
93 case LayerType::Addition:
94 {
95 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
96 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
97 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +010098 result = layerSupportObject->IsAdditionSupported(
telsoa01c577f2c2018-08-31 09:22:23 +010099 OverrideDataType(input0, dataType),
100 OverrideDataType(input1, dataType),
101 OverrideDataType(output, dataType),
David Beck33f0ae02018-10-18 15:13:56 +0100102 reason);
telsoa014fcda012018-03-09 14:13:49 +0000103 break;
104 }
Nikhil Rajee391d52019-09-05 17:50:44 +0100105 case LayerType::ArgMinMax:
106 {
107 auto cLayer = boost::polymorphic_downcast<const ArgMinMaxLayer*>(&layer);
108 const ArgMinMaxDescriptor& descriptor = cLayer->GetParameters();
109
110 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
111 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
112 result = layerSupportObject->IsArgMinMaxSupported(
113 OverrideDataType(input, dataType),
114 OverrideDataType(output, dataType),
115 descriptor,
116 reason);
117 break;
118 }
telsoa014fcda012018-03-09 14:13:49 +0000119 case LayerType::BatchNormalization:
120 {
121 auto cLayer = boost::polymorphic_downcast<const BatchNormalizationLayer*>(&layer);
122 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100123 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
124 const TensorInfo& mean = cLayer->m_Mean->GetTensorInfo();
125 const TensorInfo& var = cLayer->m_Variance->GetTensorInfo();
126 const TensorInfo& beta = cLayer->m_Beta->GetTensorInfo();
127 const TensorInfo& gamma = cLayer->m_Gamma->GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100128 result = layerSupportObject->IsBatchNormalizationSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100129 OverrideDataType(input, dataType),
130 OverrideDataType(output, dataType),
131 OverrideDataType(mean, dataType),
132 OverrideDataType(var, dataType),
133 OverrideDataType(beta, dataType),
134 OverrideDataType(gamma, dataType),
135 cLayer->GetParameters(),
David Beck33f0ae02018-10-18 15:13:56 +0100136 reason);
telsoa014fcda012018-03-09 14:13:49 +0000137 break;
138 }
Éanna Ó Catháin4e1e1362018-11-12 11:36:34 +0000139 case LayerType::BatchToSpaceNd:
140 {
141 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
142 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
143 auto cLayer = boost::polymorphic_downcast<const BatchToSpaceNdLayer*>(&layer);
144
145 result = layerSupportObject->IsBatchToSpaceNdSupported(OverrideDataType(input, dataType),
146 OverrideDataType(output, dataType),
147 cLayer->GetParameters(),
148 reason);
149 break;
150 }
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +0100151 case LayerType::Comparison:
152 {
153 auto cLayer = boost::polymorphic_downcast<const ComparisonLayer*>(&layer);
154
155 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
156 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
157 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
158
159 result = layerSupportObject->IsComparisonSupported(OverrideDataType(input0, dataType),
160 OverrideDataType(input1, dataType),
161 OverrideDataType(output, DataType::Boolean),
162 cLayer->GetParameters(),
163 reason);
164 break;
165 }
telsoa014fcda012018-03-09 14:13:49 +0000166 case LayerType::Constant:
167 {
168 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100169 result = layerSupportObject->IsConstantSupported(OverrideDataType(output, dataType), reason);
telsoa01c577f2c2018-08-31 09:22:23 +0100170 break;
171 }
172 case LayerType::ConvertFp16ToFp32:
173 {
174 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
175 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100176 result = layerSupportObject->IsConvertFp16ToFp32Supported(input, output, reason);
telsoa01c577f2c2018-08-31 09:22:23 +0100177 break;
178 }
179 case LayerType::ConvertFp32ToFp16:
180 {
181 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
182 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100183 result = layerSupportObject->IsConvertFp32ToFp16Supported(input, output, reason);
telsoa014fcda012018-03-09 14:13:49 +0000184 break;
185 }
186 case LayerType::Convolution2d:
187 {
188 auto cLayer = boost::polymorphic_downcast<const Convolution2dLayer*>(&layer);
arovir01a6824102018-08-28 17:40:45 +0100189
190 const TensorInfo input = OverrideDataType(layer.GetInputSlot(0).GetConnection()->GetTensorInfo(),
191 dataType);
telsoa01c577f2c2018-08-31 09:22:23 +0100192 const TensorInfo output = OverrideDataType(layer.GetOutputSlot(0).GetTensorInfo(), dataType);
surmeh013537c2c2018-05-18 16:31:43 +0100193 BOOST_ASSERT(cLayer->m_Weight.get() != nullptr);
194
arovir01a6824102018-08-28 17:40:45 +0100195 const Convolution2dDescriptor& descriptor = cLayer->GetParameters();
surmeh013537c2c2018-05-18 16:31:43 +0100196
arovir01a6824102018-08-28 17:40:45 +0100197 // Construct optional biases object based on the value of m_BiasEnabled
David Beck5eec11d2018-10-04 15:43:17 +0100198 Optional<TensorInfo> biases;
surmeh013537c2c2018-05-18 16:31:43 +0100199 if (descriptor.m_BiasEnabled)
200 {
David Beck5eec11d2018-10-04 15:43:17 +0100201 biases =
202 OverrideDataType(cLayer->m_Bias->GetTensorInfo(), GetBiasTypeFromWeightsType(dataType));
surmeh013537c2c2018-05-18 16:31:43 +0100203 }
204
David Beck33f0ae02018-10-18 15:13:56 +0100205 result = layerSupportObject->IsConvolution2dSupported(
surmeh013537c2c2018-05-18 16:31:43 +0100206 input,
207 output,
208 descriptor,
telsoa01c577f2c2018-08-31 09:22:23 +0100209 OverrideDataType(cLayer->m_Weight->GetTensorInfo(), dataType),
arovir01a6824102018-08-28 17:40:45 +0100210 biases,
David Beck33f0ae02018-10-18 15:13:56 +0100211 reason);
telsoa014fcda012018-03-09 14:13:49 +0000212 break;
213 }
Nattapat Chaimanowonga9a1cf12018-12-03 16:06:49 +0000214 case LayerType::Debug:
215 {
216 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
217 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
218
219 result = layerSupportObject->IsDebugSupported(OverrideDataType(input, dataType),
220 OverrideDataType(output, dataType),
221 reason);
222 break;
223 }
Aron Virginas-Tardd6247f2019-09-19 14:31:17 +0100224 case LayerType::DepthToSpace:
225 {
226 auto cLayer = boost::polymorphic_downcast<const DepthToSpaceLayer*>(&layer);
227
228 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
229 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
230
231 result = layerSupportObject->IsDepthToSpaceSupported(OverrideDataType(input, dataType),
232 OverrideDataType(output, dataType),
233 cLayer->GetParameters(),
234 reason);
235 break;
236 }
telsoa014fcda012018-03-09 14:13:49 +0000237 case LayerType::DepthwiseConvolution2d:
238 {
239 auto cLayer = boost::polymorphic_downcast<const DepthwiseConvolution2dLayer*>(&layer);
telsoa01c577f2c2018-08-31 09:22:23 +0100240 const TensorInfo& input = OverrideDataType(layer.GetInputSlot(0).GetConnection()->GetTensorInfo(),
241 dataType);
242 const TensorInfo& output = OverrideDataType(layer.GetOutputSlot(0).GetTensorInfo(), dataType);
243 BOOST_ASSERT(cLayer->m_Weight.get() != nullptr);
244
telsoa01c577f2c2018-08-31 09:22:23 +0100245 const DepthwiseConvolution2dDescriptor& descriptor = cLayer->GetParameters();
arovir01a6824102018-08-28 17:40:45 +0100246
247 // Construct optional biases object based on the value of m_BiasEnabled
David Beck5eec11d2018-10-04 15:43:17 +0100248 Optional<TensorInfo> biases;
telsoa01c577f2c2018-08-31 09:22:23 +0100249 if (descriptor.m_BiasEnabled)
250 {
David Beck5eec11d2018-10-04 15:43:17 +0100251 biases =
252 OverrideDataType(cLayer->m_Bias->GetTensorInfo(), GetBiasTypeFromWeightsType(dataType));
telsoa01c577f2c2018-08-31 09:22:23 +0100253 }
telsoa01c577f2c2018-08-31 09:22:23 +0100254
David Beck33f0ae02018-10-18 15:13:56 +0100255 result = layerSupportObject->IsDepthwiseConvolutionSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100256 input,
257 output,
258 descriptor,
259 OverrideDataType(cLayer->m_Weight->GetTensorInfo(), dataType),
arovir01a6824102018-08-28 17:40:45 +0100260 biases,
David Beck33f0ae02018-10-18 15:13:56 +0100261 reason);
telsoa014fcda012018-03-09 14:13:49 +0000262 break;
263 }
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +0000264 case LayerType::Dequantize:
265 {
266 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
267 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
268
269 result = layerSupportObject->IsDequantizeSupported(OverrideDataType(input, dataType),
270 OverrideDataType(output, DataType::Float32),
271 reason);
272 break;
273 }
Narumol Prangnawarat94dd5d82019-01-23 18:06:26 +0000274 case LayerType::DetectionPostProcess:
275 {
276 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
277 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
278 auto cLayer = boost::polymorphic_downcast<const DetectionPostProcessLayer*>(&layer);
279 const DetectionPostProcessDescriptor& descriptor = cLayer->GetParameters();
280 result = layerSupportObject->IsDetectionPostProcessSupported(input0,
281 input1,
282 descriptor,
283 reason);
284 break;
285 }
telsoa014fcda012018-03-09 14:13:49 +0000286 case LayerType::FakeQuantization:
287 {
288 auto cLayer = boost::polymorphic_downcast<const FakeQuantizationLayer*>(&layer);
289 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100290 result = layerSupportObject->IsFakeQuantizationSupported(OverrideDataType(input, dataType),
291 cLayer->GetParameters(),
292 reason);
telsoa014fcda012018-03-09 14:13:49 +0000293 break;
294 }
295 case LayerType::Floor:
296 {
297 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
298 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100299 result = layerSupportObject->IsFloorSupported(OverrideDataType(input, dataType),
300 OverrideDataType(output, dataType),
301 reason);
telsoa014fcda012018-03-09 14:13:49 +0000302 break;
303 }
304 case LayerType::FullyConnected:
305 {
306 auto cLayer = boost::polymorphic_downcast<const FullyConnectedLayer*>(&layer);
307 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100308 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
309 BOOST_ASSERT(cLayer->m_Weight.get() != nullptr);
310
311 TensorInfo biasInfo;
312 const TensorInfo * biasInfoPtr = nullptr;
313 static const TensorInfo dummyFloat16Bias(TensorShape({1,1,1,1}), DataType::Float16);
314 static const TensorInfo dummyFloat32Bias(TensorShape({1,1,1,1}), DataType::Float32);
315 static const TensorInfo dummyQA8Bias(TensorShape({1,1,1,1}), DataType::Signed32);
316
317 const FullyConnectedDescriptor& descriptor = cLayer->GetParameters();
318 if (descriptor.m_BiasEnabled)
319 {
320 BOOST_ASSERT(cLayer->m_Bias.get() != nullptr);
321 biasInfo = OverrideDataType(cLayer->m_Bias->GetTensorInfo(), GetBiasTypeFromWeightsType(dataType));
322 biasInfoPtr = &biasInfo;
323 }
324 else
325 {
326 // If biases are not enabled pass a dummy tensorinfo for the validation
327 switch(input.GetDataType())
328 {
329 case DataType::Float16:
330 {
331 biasInfoPtr = &dummyFloat16Bias;
332 break;
333 }
334 case DataType::Float32:
335 {
336 biasInfoPtr = &dummyFloat32Bias;
337 break;
338 }
339 case DataType::QuantisedAsymm8:
Narumol Prangnawaratf9ac3fd2019-07-03 14:55:57 +0100340 case DataType::QuantisedSymm16:
telsoa01c577f2c2018-08-31 09:22:23 +0100341 {
342 biasInfoPtr = &dummyQA8Bias;
343 break;
344 }
345 default:
346 {
347 BOOST_ASSERT_MSG(false, "Unexpected bias type");
348 }
349 }
350 }
351
David Beck33f0ae02018-10-18 15:13:56 +0100352 result = layerSupportObject->IsFullyConnectedSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100353 OverrideDataType(input, dataType),
354 OverrideDataType(output, dataType),
355 OverrideDataType(cLayer->m_Weight->GetTensorInfo(), dataType),
356 *biasInfoPtr,
357 descriptor,
David Beck33f0ae02018-10-18 15:13:56 +0100358 reason);
telsoa014fcda012018-03-09 14:13:49 +0000359 break;
360 }
narpra01b89b05f2019-01-16 09:53:09 +0000361 case LayerType::Gather:
362 {
363 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
364 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
365 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
366 result = layerSupportObject->IsGatherSupported(OverrideDataType(input0, dataType),
Ellen Norris-Thompsone0dbedf2019-06-24 09:23:38 +0100367 input1,
narpra01b89b05f2019-01-16 09:53:09 +0000368 OverrideDataType(output, dataType),
369 reason);
370 break;
371 }
telsoa014fcda012018-03-09 14:13:49 +0000372 case LayerType::Input:
373 {
374 const TensorInfo& input = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100375 result = layerSupportObject->IsInputSupported(OverrideDataType(input, dataType), reason);
telsoa014fcda012018-03-09 14:13:49 +0000376 break;
377 }
Kevin Mayce5045a2019-10-02 14:07:47 +0100378 case LayerType::InstanceNormalization:
379 {
380 auto cLayer = boost::polymorphic_downcast<const InstanceNormalizationLayer*>(&layer);
381 const InstanceNormalizationDescriptor& descriptor = cLayer->GetParameters();
382
383 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
384 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
385
386 result = layerSupportObject->IsInstanceNormalizationSupported(
387 OverrideDataType(input, dataType),
388 OverrideDataType(output, dataType),
389 descriptor,
390 reason);
391 break;
392 }
telsoa014fcda012018-03-09 14:13:49 +0000393 case LayerType::L2Normalization:
394 {
Matteo Martincighbcd3c852018-09-28 14:14:12 +0100395 auto cLayer = boost::polymorphic_downcast<const L2NormalizationLayer*>(&layer);
396 const L2NormalizationDescriptor& descriptor = cLayer->GetParameters();
397
telsoa014fcda012018-03-09 14:13:49 +0000398 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100399 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Matteo Martincighbcd3c852018-09-28 14:14:12 +0100400
David Beck33f0ae02018-10-18 15:13:56 +0100401 result = layerSupportObject->IsL2NormalizationSupported(
Matteo Martincighbcd3c852018-09-28 14:14:12 +0100402 OverrideDataType(input, dataType),
403 OverrideDataType(output, dataType),
404 descriptor,
David Beck33f0ae02018-10-18 15:13:56 +0100405 reason);
telsoa01c577f2c2018-08-31 09:22:23 +0100406 break;
407 }
Aron Virginas-Tarf982dea2019-10-11 14:07:53 +0100408 case LayerType::LogSoftmax:
409 {
410 auto cLayer = boost::polymorphic_downcast<const LogSoftmaxLayer*>(&layer);
411
412 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
413 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
414
415 result = layerSupportObject->IsLogSoftmaxSupported(OverrideDataType(input, dataType),
416 OverrideDataType(output, dataType),
417 cLayer->GetParameters(),
418 reason);
419 break;
420 }
telsoa01c577f2c2018-08-31 09:22:23 +0100421 case LayerType::Lstm:
422 {
423 auto cLayer = boost::polymorphic_downcast<const LstmLayer*>(&layer);
424 const LstmDescriptor& descriptor = cLayer->GetParameters();
425
426 // All inputs.
427 const TensorInfo& input = OverrideDataType(layer.GetInputSlot(0).GetConnection()->GetTensorInfo(),
428 dataType);
429 const TensorInfo& outputStateIn = OverrideDataType(layer.GetInputSlot(1).GetConnection()->GetTensorInfo(),
430 dataType);
431 const TensorInfo& cellStateIn = OverrideDataType(layer.GetInputSlot(2).GetConnection()->GetTensorInfo(),
432 dataType);
433 // All outputs
434 const TensorInfo& scratchBuffer = OverrideDataType(layer.GetOutputSlot(0).GetTensorInfo(), dataType);
435 const TensorInfo& outputStateOut = OverrideDataType(layer.GetOutputSlot(1).GetTensorInfo(), dataType);
436 const TensorInfo& cellStateOut = OverrideDataType(layer.GetOutputSlot(2).GetTensorInfo(), dataType);
437 const TensorInfo& output = OverrideDataType(layer.GetOutputSlot(3).GetTensorInfo(), dataType);
438
439 // Basic parameters
440 const TensorInfo& inputToForgetWeights
441 = OverrideDataType(cLayer->m_BasicParameters.m_InputToForgetWeights->GetTensorInfo(), dataType);
442 const TensorInfo& inputToCellWeights
443 = OverrideDataType(cLayer->m_BasicParameters.m_InputToCellWeights->GetTensorInfo(), dataType);
444 const TensorInfo& inputToOutputWeights
445 = OverrideDataType(cLayer->m_BasicParameters.m_InputToOutputWeights->GetTensorInfo(), dataType);
446 const TensorInfo& recurrentToForgetWeights
447 = OverrideDataType(cLayer->m_BasicParameters.m_RecurrentToForgetWeights->GetTensorInfo(), dataType);
448 const TensorInfo& recurrentToCellWeights
449 = OverrideDataType(cLayer->m_BasicParameters.m_RecurrentToCellWeights->GetTensorInfo(), dataType);
450 const TensorInfo& recurrentToOutputWeights
451 = OverrideDataType(cLayer->m_BasicParameters.m_RecurrentToOutputWeights->GetTensorInfo(), dataType);
452 const TensorInfo& forgetGateBias
453 = OverrideDataType(cLayer->m_BasicParameters.m_ForgetGateBias->GetTensorInfo(), dataType);
454 const TensorInfo& cellBias
455 = OverrideDataType(cLayer->m_BasicParameters.m_CellBias->GetTensorInfo(), dataType);
456 const TensorInfo& outputGateBias
457 = OverrideDataType(cLayer->m_BasicParameters.m_OutputGateBias->GetTensorInfo(), dataType);
458
Jan Eilersd01a83c2019-07-03 18:20:40 +0100459 LstmInputParamsInfo paramsInfo;
telsoa01c577f2c2018-08-31 09:22:23 +0100460
Jan Eilersd01a83c2019-07-03 18:20:40 +0100461 paramsInfo.m_InputToForgetWeights = &inputToForgetWeights;
462 paramsInfo.m_InputToCellWeights = &inputToCellWeights;
463 paramsInfo.m_InputToOutputWeights = &inputToOutputWeights;
464 paramsInfo.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
465 paramsInfo.m_RecurrentToCellWeights = &recurrentToCellWeights;
466 paramsInfo.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
467 paramsInfo.m_ForgetGateBias = &forgetGateBias;
468 paramsInfo.m_CellBias = &cellBias;
469 paramsInfo.m_OutputGateBias = &outputGateBias;
470
471
472 // Optional parameters
telsoa01c577f2c2018-08-31 09:22:23 +0100473 TensorInfo optInputToInputWeights;
474 TensorInfo optRecurrentToInputWeights;
475 TensorInfo optCellToInputWeights;
476 TensorInfo optInputGateBias;
477 TensorInfo optProjectionWeights;
478 TensorInfo optProjectionBias;
479 TensorInfo optCellToForgetWeights;
480 TensorInfo optCellToOutputWeights;
Jan Eilers38e05bd2019-06-26 13:10:09 +0100481 TensorInfo optInputLayerNormWeights;
482 TensorInfo optForgetLayerNormWeights;
483 TensorInfo optCellLayerNormWeights;
484 TensorInfo optOutputLayerNormWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100485
486 if(!descriptor.m_CifgEnabled)
487 {
488 optInputToInputWeights =
489 OverrideDataType(cLayer->m_CifgParameters.m_InputToInputWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100490 paramsInfo.m_InputToInputWeights = &optInputToInputWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100491
492 optRecurrentToInputWeights =
493 OverrideDataType(cLayer->m_CifgParameters.m_RecurrentToInputWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100494 paramsInfo.m_RecurrentToInputWeights = &optRecurrentToInputWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100495 if (cLayer->m_CifgParameters.m_CellToInputWeights != nullptr)
496 {
497 optCellToInputWeights =
498 OverrideDataType(cLayer->m_CifgParameters.m_CellToInputWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100499 paramsInfo.m_CellToInputWeights = &optCellToInputWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100500 }
501 optInputGateBias =
502 OverrideDataType(cLayer->m_CifgParameters.m_InputGateBias->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100503 paramsInfo.m_InputGateBias = &optInputGateBias;
telsoa01c577f2c2018-08-31 09:22:23 +0100504 }
505
506 if(descriptor.m_ProjectionEnabled)
507 {
508 optProjectionWeights =
509 OverrideDataType(cLayer->m_ProjectionParameters.m_ProjectionWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100510 paramsInfo.m_ProjectionWeights = &optProjectionWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100511 if (cLayer->m_ProjectionParameters.m_ProjectionBias != nullptr)
512 {
513 optProjectionBias =
514 OverrideDataType(cLayer->m_ProjectionParameters.m_ProjectionBias->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100515 paramsInfo.m_ProjectionBias = &optProjectionBias;
telsoa01c577f2c2018-08-31 09:22:23 +0100516 }
517 }
518
519 if(descriptor.m_PeepholeEnabled)
520 {
521 optCellToForgetWeights =
522 OverrideDataType(cLayer->m_PeepholeParameters.m_CellToForgetWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100523 paramsInfo.m_CellToForgetWeights = &optCellToForgetWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100524 optCellToOutputWeights =
525 OverrideDataType(cLayer->m_PeepholeParameters.m_CellToOutputWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100526 paramsInfo.m_CellToOutputWeights = &optCellToOutputWeights;
telsoa01c577f2c2018-08-31 09:22:23 +0100527 }
528
Jan Eilers38e05bd2019-06-26 13:10:09 +0100529 if(descriptor.m_LayerNormEnabled)
530 {
Ferran Balaguere30c16e2019-07-24 17:03:45 +0100531 if (!descriptor.m_CifgEnabled)
532 {
533 optInputLayerNormWeights = OverrideDataType(
534 cLayer->m_LayerNormParameters.m_InputLayerNormWeights->GetTensorInfo(), dataType);
535 paramsInfo.m_InputLayerNormWeights = &optInputLayerNormWeights;
536 }
Jan Eilers38e05bd2019-06-26 13:10:09 +0100537
538 optForgetLayerNormWeights = OverrideDataType(
539 cLayer->m_LayerNormParameters.m_ForgetLayerNormWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100540 paramsInfo.m_ForgetLayerNormWeights = &optForgetLayerNormWeights;
Jan Eilers38e05bd2019-06-26 13:10:09 +0100541
542 optCellLayerNormWeights = OverrideDataType(
543 cLayer->m_LayerNormParameters.m_CellLayerNormWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100544 paramsInfo.m_CellLayerNormWeights = &optCellLayerNormWeights;
Jan Eilers38e05bd2019-06-26 13:10:09 +0100545
546 optOutputLayerNormWeights = OverrideDataType(
547 cLayer->m_LayerNormParameters.m_OutputLayerNormWeights->GetTensorInfo(), dataType);
Jan Eilersd01a83c2019-07-03 18:20:40 +0100548 paramsInfo.m_OutputLayerNormWeights = &optOutputLayerNormWeights;
Jan Eilers38e05bd2019-06-26 13:10:09 +0100549 }
550
David Beck33f0ae02018-10-18 15:13:56 +0100551 result = layerSupportObject->IsLstmSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100552 input,
553 outputStateIn,
554 cellStateIn,
555 scratchBuffer,
556 outputStateOut,
557 cellStateOut,
558 output,
559 descriptor,
Jan Eilersd01a83c2019-07-03 18:20:40 +0100560 paramsInfo,
561 reason);
telsoa014fcda012018-03-09 14:13:49 +0000562 break;
563 }
Nattapat Chaimanowong5a4304a2018-11-28 10:44:37 +0000564 case LayerType::Maximum:
565 {
566 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
567 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
568 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
569
570 result = layerSupportObject->IsMaximumSupported(OverrideDataType(input0, dataType),
571 OverrideDataType(input1, dataType),
572 OverrideDataType(output, dataType),
573 reason);
574 break;
575 }
narpra01b89b05f2019-01-16 09:53:09 +0000576 case LayerType::MemCopy:
577 {
578 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
579 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
Matteo Martincigh992d6dc2019-01-10 17:34:20 +0000580
narpra01b89b05f2019-01-16 09:53:09 +0000581 result = layerSupportObject->IsMemCopySupported(OverrideDataType(input, dataType),
582 OverrideDataType(output, dataType),
583 reason);
584 break;
585 }
Derek Lambertif674aa02019-08-01 15:56:25 +0100586 case LayerType::MemImport:
587 {
588 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
589 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
590
591 result = layerSupportObject->IsMemImportSupported(OverrideDataType(input, dataType),
592 OverrideDataType(output, dataType),
593 reason);
594 break;
595 }
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +0100596 case LayerType::Merge:
597 {
598 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
599 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
600 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
601
602 result = layerSupportObject->IsMergeSupported(OverrideDataType(input0, dataType),
603 OverrideDataType(input1, dataType),
604 OverrideDataType(output, dataType),
605 reason);
606 break;
607 }
Jim Flynne242f2d2019-05-22 14:24:13 +0100608 case LayerType::Concat:
telsoa014fcda012018-03-09 14:13:49 +0000609 {
Jim Flynne242f2d2019-05-22 14:24:13 +0100610 auto cLayer = boost::polymorphic_downcast<const ConcatLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000611
telsoa01c577f2c2018-08-31 09:22:23 +0100612 // Get vector of all inputs.
613 auto getTensorInfo = [&dataType](const InputSlot& slot)
telsoa014fcda012018-03-09 14:13:49 +0000614 {
telsoa01c577f2c2018-08-31 09:22:23 +0100615 return OverrideDataType(slot.GetConnectedOutputSlot()->GetTensorInfo(), dataType);
telsoa014fcda012018-03-09 14:13:49 +0000616 };
telsoa01c577f2c2018-08-31 09:22:23 +0100617 auto beginI = boost::make_transform_iterator(layer.GetInputSlots().begin(), getTensorInfo);
618 auto endI = boost::make_transform_iterator(layer.GetInputSlots().end(), getTensorInfo);
619 std::vector<TensorInfo> inputs(beginI, endI);
telsoa014fcda012018-03-09 14:13:49 +0000620
telsoa01c577f2c2018-08-31 09:22:23 +0100621 auto getTensorInfoPtr = [](const TensorInfo& info)
622 {
623 return &info;
624 };
625 auto beginPtr = boost::make_transform_iterator(inputs.begin(), getTensorInfoPtr);
626 auto endPtr = boost::make_transform_iterator(inputs.end(), getTensorInfoPtr);
627 std::vector<const TensorInfo*> inputPtrs(beginPtr, endPtr);
telsoa014fcda012018-03-09 14:13:49 +0000628
Nikhil Raj8599a412018-11-19 14:51:07 +0000629 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
630
Jim Flynne242f2d2019-05-22 14:24:13 +0100631 result = layerSupportObject->IsConcatSupported(inputPtrs, output, cLayer->GetParameters(), reason);
632
633
telsoa014fcda012018-03-09 14:13:49 +0000634 break;
635 }
636 case LayerType::Multiplication:
637 {
638 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
639 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100640 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100641 result = layerSupportObject->IsMultiplicationSupported(
telsoa01c577f2c2018-08-31 09:22:23 +0100642 OverrideDataType(input0, dataType),
643 OverrideDataType(input1, dataType),
644 OverrideDataType(output, dataType),
David Beck33f0ae02018-10-18 15:13:56 +0100645 reason);
telsoa014fcda012018-03-09 14:13:49 +0000646 break;
647 }
648 case LayerType::Normalization:
649 {
650 auto cLayer = boost::polymorphic_downcast<const NormalizationLayer*>(&layer);
651 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
652 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100653 result = layerSupportObject->IsNormalizationSupported(OverrideDataType(input, dataType),
654 OverrideDataType(output, dataType),
655 cLayer->GetParameters(),
656 reason);
telsoa014fcda012018-03-09 14:13:49 +0000657 break;
658 }
659 case LayerType::Output:
660 {
661 const TensorInfo& output = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100662 result = layerSupportObject->IsOutputSupported(OverrideDataType(output, dataType), reason);
telsoa014fcda012018-03-09 14:13:49 +0000663 break;
664 }
665 case LayerType::Permute:
666 {
667 auto cLayer = boost::polymorphic_downcast<const PermuteLayer*>(&layer);
668 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
669 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100670 result = layerSupportObject->IsPermuteSupported(OverrideDataType(input, dataType),
671 OverrideDataType(output, dataType),
672 cLayer->GetParameters(),
673 reason);
telsoa014fcda012018-03-09 14:13:49 +0000674 break;
675 }
Mohamed Nour Abouelseoud5662c202018-09-24 13:30:09 +0100676 case LayerType::Pad:
677 {
678 auto cLayer = boost::polymorphic_downcast<const PadLayer*>(&layer);
679 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
680 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100681 result = layerSupportObject->IsPadSupported(
Mohamed Nour Abouelseoud5662c202018-09-24 13:30:09 +0100682 OverrideDataType(input, dataType),
683 OverrideDataType(output, dataType),
684 cLayer->GetParameters(),
David Beck33f0ae02018-10-18 15:13:56 +0100685 reason);
Mohamed Nour Abouelseoud5662c202018-09-24 13:30:09 +0100686 break;
687 }
telsoa014fcda012018-03-09 14:13:49 +0000688 case LayerType::Pooling2d:
689 {
690 auto cLayer = boost::polymorphic_downcast<const Pooling2dLayer*>(&layer);
691 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
692 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100693 result = layerSupportObject->IsPooling2dSupported(OverrideDataType(input, dataType),
694 OverrideDataType(output, dataType),
695 cLayer->GetParameters(),
696 reason);
telsoa014fcda012018-03-09 14:13:49 +0000697 break;
698 }
Matteo Martincigh49124022019-01-11 13:25:59 +0000699 case LayerType::PreCompiled:
700 {
701 auto cLayer = boost::polymorphic_downcast<const PreCompiledLayer*>(&layer);
702 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
703 result = layerSupportObject->IsPreCompiledSupported(OverrideDataType(input, dataType),
704 cLayer->GetParameters(),
705 reason);
706 break;
707 }
Derek Lambertia9cca6a2019-03-25 15:41:58 +0000708 case LayerType::Quantize:
709 {
710 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
711 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
712 result = layerSupportObject->IsQuantizeSupported(input, output, reason);
713 break;
714 }
James Conroyee18dc82019-07-17 11:27:46 +0100715 case LayerType::QuantizedLstm:
716 {
717 auto cLayer = boost::polymorphic_downcast<const QuantizedLstmLayer*>(&layer);
718
719 // Inputs
Ferran Balaguer737d9ff2019-08-01 09:58:08 +0100720 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
721 const TensorInfo& previousCellStateIn = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
722 const TensorInfo& previousOutputIn = layer.GetInputSlot(2).GetConnection()->GetTensorInfo();
James Conroyee18dc82019-07-17 11:27:46 +0100723
724 // Outputs
Ferran Balaguer737d9ff2019-08-01 09:58:08 +0100725 const TensorInfo& cellStateOut = layer.GetOutputSlot(0).GetTensorInfo();
726 const TensorInfo& output = layer.GetOutputSlot(1).GetTensorInfo();
James Conroyee18dc82019-07-17 11:27:46 +0100727
728 // QuantizedLstm parameters
James Conroyee18dc82019-07-17 11:27:46 +0100729 QuantizedLstmInputParamsInfo paramsInfo;
730
Ferran Balaguer737d9ff2019-08-01 09:58:08 +0100731 paramsInfo.m_InputToInputWeights =
732 &cLayer->m_QuantizedLstmParameters.m_InputToInputWeights->GetTensorInfo();
733 paramsInfo.m_InputToForgetWeights =
734 &cLayer->m_QuantizedLstmParameters.m_InputToForgetWeights->GetTensorInfo();
735 paramsInfo.m_InputToCellWeights =
736 &cLayer->m_QuantizedLstmParameters.m_InputToCellWeights->GetTensorInfo();
737 paramsInfo.m_InputToOutputWeights =
738 &cLayer->m_QuantizedLstmParameters.m_InputToOutputWeights->GetTensorInfo();
James Conroyee18dc82019-07-17 11:27:46 +0100739
Ferran Balaguer737d9ff2019-08-01 09:58:08 +0100740 paramsInfo.m_RecurrentToInputWeights =
741 &cLayer->m_QuantizedLstmParameters.m_RecurrentToInputWeights->GetTensorInfo();
742 paramsInfo.m_RecurrentToForgetWeights =
743 &cLayer->m_QuantizedLstmParameters.m_RecurrentToForgetWeights->GetTensorInfo();
744 paramsInfo.m_RecurrentToCellWeights =
745 &cLayer->m_QuantizedLstmParameters.m_RecurrentToCellWeights->GetTensorInfo();
746 paramsInfo.m_RecurrentToOutputWeights =
747 &cLayer->m_QuantizedLstmParameters.m_RecurrentToOutputWeights->GetTensorInfo();
James Conroyee18dc82019-07-17 11:27:46 +0100748
Ferran Balaguer737d9ff2019-08-01 09:58:08 +0100749 paramsInfo.m_InputGateBias =
750 &cLayer->m_QuantizedLstmParameters.m_InputGateBias->GetTensorInfo();
751 paramsInfo.m_ForgetGateBias =
752 &cLayer->m_QuantizedLstmParameters.m_ForgetGateBias->GetTensorInfo();
753 paramsInfo.m_CellBias =
754 &cLayer->m_QuantizedLstmParameters.m_CellBias->GetTensorInfo();
755 paramsInfo.m_OutputGateBias =
756 &cLayer->m_QuantizedLstmParameters.m_OutputGateBias->GetTensorInfo();;
James Conroyee18dc82019-07-17 11:27:46 +0100757
758 result = layerSupportObject->IsQuantizedLstmSupported(input,
759 previousCellStateIn,
760 previousOutputIn,
761 cellStateOut,
762 output,
763 paramsInfo,
764 reason);
765 break;
766 }
Francis Murtaghe7a86a42018-08-29 12:42:10 +0100767 case LayerType::Division:
768 {
769 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
770 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
771 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100772 result = layerSupportObject->IsDivisionSupported(
Francis Murtaghe7a86a42018-08-29 12:42:10 +0100773 OverrideDataType(input0, dataType),
774 OverrideDataType(input1, dataType),
775 OverrideDataType(output, dataType),
David Beck33f0ae02018-10-18 15:13:56 +0100776 reason);
Francis Murtaghe7a86a42018-08-29 12:42:10 +0100777 break;
778 }
telsoa014fcda012018-03-09 14:13:49 +0000779 case LayerType::Reshape:
780 {
Matteo Martincigh992d6dc2019-01-10 17:34:20 +0000781 auto cLayer = boost::polymorphic_downcast<const ReshapeLayer*>(&layer);
telsoa014fcda012018-03-09 14:13:49 +0000782 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
Matteo Martincigh992d6dc2019-01-10 17:34:20 +0000783 result = layerSupportObject->IsReshapeSupported(OverrideDataType(input, dataType),
784 cLayer->GetParameters(),
785 reason);
telsoa014fcda012018-03-09 14:13:49 +0000786 break;
787 }
Teresa Charlina9075df2019-06-27 15:41:57 +0100788 case LayerType::Resize:
789 {
790 auto cLayer = boost::polymorphic_downcast<const ResizeLayer*>(&layer);
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +0100791 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
Teresa Charlina9075df2019-06-27 15:41:57 +0100792 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
793 result = layerSupportObject->IsResizeSupported(OverrideDataType(input, dataType),
794 OverrideDataType(output, dataType),
795 cLayer->GetParameters(),
796 reason);
797 break;
798 }
Mohamed Nour Abouelseouda1d3c6a2018-12-27 12:39:16 +0000799 case LayerType::Rsqrt:
800 {
801 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
802 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
803 result = layerSupportObject->IsRsqrtSupported(OverrideDataType(input, dataType),
804 OverrideDataType(output, dataType),
805 reason);
806 break;
807 }
Aron Virginas-Tar636ab402019-09-16 14:27:45 +0100808 case LayerType::Slice:
809 {
810 auto cLayer = boost::polymorphic_downcast<const SliceLayer*>(&layer);
811
812 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
813 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
814
815 result = layerSupportObject->IsSliceSupported(OverrideDataType(input, dataType),
816 OverrideDataType(output, dataType),
817 cLayer->GetParameters(),
818 reason);
819 break;
820 }
telsoa014fcda012018-03-09 14:13:49 +0000821 case LayerType::Softmax:
822 {
823 auto cLayer = boost::polymorphic_downcast<const SoftmaxLayer*>(&layer);
824 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
telsoa01c577f2c2018-08-31 09:22:23 +0100825 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100826 result = layerSupportObject->IsSoftmaxSupported(OverrideDataType(input, dataType),
827 OverrideDataType(output, dataType),
828 cLayer->GetParameters(),
829 reason);
telsoa014fcda012018-03-09 14:13:49 +0000830 break;
831 }
Nattapat Chaimanowong207ef9a2018-11-02 10:57:25 +0000832 case LayerType::SpaceToBatchNd:
833 {
834 auto cLayer = boost::polymorphic_downcast<const SpaceToBatchNdLayer*>(&layer);
835 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
836 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
837 result = layerSupportObject->IsSpaceToBatchNdSupported(OverrideDataType(input, dataType),
838 OverrideDataType(output, dataType),
839 cLayer->GetParameters(),
840 reason);
841 break;
842 }
Aron Virginas-Tar972af152019-06-11 14:14:03 +0100843 case LayerType::SpaceToDepth:
844 {
845 auto cLayer = boost::polymorphic_downcast<const SpaceToDepthLayer*>(&layer);
846
847 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
848 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
849
850 result = layerSupportObject->IsSpaceToDepthSupported(OverrideDataType(input, dataType),
851 OverrideDataType(output, dataType),
852 cLayer->GetParameters(),
853 reason);
854 break;
855 }
telsoa014fcda012018-03-09 14:13:49 +0000856 case LayerType::Splitter:
857 {
858 auto cLayer = boost::polymorphic_downcast<const SplitterLayer*>(&layer);
859 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
Narumol Prangnawarat15eb5832019-05-20 15:31:05 +0100860
861 // Get vector of all outputs.
862 auto getTensorInfo = [&dataType](const OutputSlot& slot)
863 {
864 return OverrideDataType(slot.GetTensorInfo(), dataType);
865 };
866 auto beginI = boost::make_transform_iterator(layer.GetOutputSlots().begin(), getTensorInfo);
867 auto endI = boost::make_transform_iterator(layer.GetOutputSlots().end(), getTensorInfo);
868 std::vector<TensorInfo> outputs(beginI, endI);
869
870 const std::vector<std::reference_wrapper<TensorInfo>> outputPtrs(outputs.begin(), outputs.end());
871
David Beck33f0ae02018-10-18 15:13:56 +0100872 result = layerSupportObject->IsSplitterSupported(OverrideDataType(input, dataType),
Narumol Prangnawarat15eb5832019-05-20 15:31:05 +0100873 outputPtrs,
David Beck33f0ae02018-10-18 15:13:56 +0100874 cLayer->GetParameters(),
875 reason);
telsoa014fcda012018-03-09 14:13:49 +0000876 break;
877 }
Matthew Jackson2b8c1da2019-07-04 14:59:16 +0100878 case LayerType::Stack:
879 {
880 auto cLayer = boost::polymorphic_downcast<const StackLayer*>(&layer);
881
882 // Get vector of all inputs.
883 auto getTensorInfo = [&dataType](const InputSlot& slot)
884 {
885 return OverrideDataType(slot.GetConnectedOutputSlot()->GetTensorInfo(), dataType);
886 };
887 auto beginI = boost::make_transform_iterator(layer.GetInputSlots().begin(), getTensorInfo);
888 auto endI = boost::make_transform_iterator(layer.GetInputSlots().end(), getTensorInfo);
889 std::vector<TensorInfo> inputs(beginI, endI);
890
891 auto getTensorInfoPtr = [](const TensorInfo& info)
892 {
893 return &info;
894 };
895 auto beginPtr = boost::make_transform_iterator(inputs.begin(), getTensorInfoPtr);
896 auto endPtr = boost::make_transform_iterator(inputs.end(), getTensorInfoPtr);
897 std::vector<const TensorInfo*> inputPtrs(beginPtr, endPtr);
898
899 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
900
901 result = layerSupportObject->IsStackSupported(inputPtrs, output, cLayer->GetParameters(), reason);
902
903 break;
904 }
Derek Lamberti013c3902019-10-21 10:46:16 +0100905 case LayerType::StandIn:
906 {
907 auto cLayer = boost::polymorphic_downcast<const StandInLayer*>(&layer);
908
909 // Get vector of all inputs.
910 auto getTensorInfoIn = [&dataType](const InputSlot& slot)
911 {
912 return OverrideDataType(slot.GetConnectedOutputSlot()->GetTensorInfo(), dataType);
913 };
914 auto getTensorInfoOut = [&dataType](const OutputSlot& slot)
915 {
916 return OverrideDataType(slot.GetTensorInfo(), dataType);
917 };
918 auto beginI = boost::make_transform_iterator(layer.GetInputSlots().begin(), getTensorInfoIn);
919 auto endI = boost::make_transform_iterator(layer.GetInputSlots().end(), getTensorInfoIn);
920 std::vector<TensorInfo> inputs(beginI, endI);
921
922 auto beginO = boost::make_transform_iterator(layer.GetOutputSlots().begin(), getTensorInfoOut);
923 auto endO = boost::make_transform_iterator(layer.GetOutputSlots().end(), getTensorInfoOut);
924 std::vector<TensorInfo> outputs(beginO, endO);
925
926
927 auto getTensorInfoPtr = [](const TensorInfo& info)
928 {
929 return &info;
930 };
931 auto beginPtrI = boost::make_transform_iterator(inputs.begin(), getTensorInfoPtr);
932 auto endPtrI = boost::make_transform_iterator(inputs.end(), getTensorInfoPtr);
933 std::vector<const TensorInfo*> inputPtrs(beginPtrI, endPtrI);
934
935 auto beginPtrO = boost::make_transform_iterator(outputs.begin(), getTensorInfoPtr);
936 auto endPtrO = boost::make_transform_iterator(outputs.end(), getTensorInfoPtr);
937 std::vector<const TensorInfo*> outputPtrs(beginPtrO, endPtrO);
938
939
940 result = layerSupportObject->IsStandInSupported(inputPtrs,
941 outputPtrs,
942 cLayer->GetParameters(),
943 reason);
944 break;
945 }
Conor Kennedy430b5d82018-11-14 15:28:28 +0000946 case LayerType::StridedSlice:
947 {
948 auto cLayer = boost::polymorphic_downcast<const StridedSliceLayer*>(&layer);
949 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
950 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
951 result = layerSupportObject->IsStridedSliceSupported(OverrideDataType(input, dataType),
952 OverrideDataType(output, dataType),
953 cLayer->GetParameters(),
954 reason);
955 break;
956 }
David Beckc2044fe2018-09-05 15:00:38 +0100957 case LayerType::Subtraction:
958 {
959 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
960 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
961 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100962 result = layerSupportObject->IsSubtractionSupported(
David Beckc2044fe2018-09-05 15:00:38 +0100963 OverrideDataType(input0, dataType),
964 OverrideDataType(input1, dataType),
965 OverrideDataType(output, dataType),
David Beck33f0ae02018-10-18 15:13:56 +0100966 reason);
David Beckc2044fe2018-09-05 15:00:38 +0100967 break;
968 }
Sadik Armaganeff363d2019-04-05 15:25:46 +0100969 case LayerType::Switch:
970 {
971 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
972 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
973 const TensorInfo& output0 = layer.GetOutputSlot(0).GetTensorInfo();
974 const TensorInfo& output1 = layer.GetOutputSlot(1).GetTensorInfo();
975 result = layerSupportObject->IsSwitchSupported(OverrideDataType(input0, dataType),
976 OverrideDataType(input1, dataType),
977 OverrideDataType(output0, dataType),
978 OverrideDataType(output1, dataType),
979 reason);
980 break;
981 }
narpra0132b90462018-09-13 11:07:48 +0100982 case LayerType::Mean:
983 {
984 auto cLayer = boost::polymorphic_downcast<const MeanLayer*>(&layer);
985 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
986 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
David Beck33f0ae02018-10-18 15:13:56 +0100987 result = layerSupportObject->IsMeanSupported(
narpra0132b90462018-09-13 11:07:48 +0100988 OverrideDataType(input, dataType),
989 OverrideDataType(output, dataType),
990 cLayer->GetParameters(),
David Beck33f0ae02018-10-18 15:13:56 +0100991 reason);
narpra0132b90462018-09-13 11:07:48 +0100992 break;
993 }
kevmay0190539692018-11-29 08:40:19 +0000994 case LayerType::Minimum:
995 {
996 const TensorInfo& input0 = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
997 const TensorInfo& input1 = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
998 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
999 result = layerSupportObject->IsMinimumSupported(OverrideDataType(input0, dataType),
1000 OverrideDataType(input1, dataType),
1001 OverrideDataType(output, dataType),
1002 reason);
1003 break;
1004 }
Matteo Martincigh0e406ee2019-06-12 15:42:18 +01001005 case LayerType::Prelu:
1006 {
1007 const TensorInfo& input = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
1008 const TensorInfo& alpha = layer.GetInputSlot(1).GetConnection()->GetTensorInfo();
1009 const TensorInfo& output = layer.GetOutputSlot(0).GetTensorInfo();
1010 result = layerSupportObject->IsPreluSupported(OverrideDataType(input, dataType),
1011 OverrideDataType(alpha, dataType),
1012 OverrideDataType(output, dataType),
1013 reason);
1014 break;
1015 }
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01001016 case LayerType::TransposeConvolution2d:
1017 {
1018 auto cLayer = boost::polymorphic_downcast<const TransposeConvolution2dLayer*>(&layer);
1019
1020 const TensorInfo input = OverrideDataType(layer.GetInputSlot(0).GetConnection()->GetTensorInfo(),
1021 dataType);
1022 const TensorInfo output = OverrideDataType(layer.GetOutputSlot(0).GetTensorInfo(), dataType);
1023
1024 const TransposeConvolution2dDescriptor& descriptor = cLayer->GetParameters();
1025
1026 Optional<TensorInfo> biases;
1027 if (descriptor.m_BiasEnabled)
1028 {
1029 BOOST_ASSERT(cLayer->m_Bias.get() != nullptr);
1030 biases = OverrideDataType(cLayer->m_Bias->GetTensorInfo(),
1031 GetBiasTypeFromWeightsType(dataType));
1032 }
1033
1034 BOOST_ASSERT(cLayer->m_Weight.get() != nullptr);
1035 const TensorInfo weights = OverrideDataType(cLayer->m_Weight->GetTensorInfo(), dataType);
1036
1037 result = layerSupportObject->IsTransposeConvolution2dSupported(input,
1038 output,
1039 descriptor,
1040 weights,
1041 biases,
1042 reason);
1043
1044 break;
1045 }
telsoa014fcda012018-03-09 14:13:49 +00001046 default:
1047 {
1048 BOOST_ASSERT_MSG(false, "WorkloadFactory did not recognise type of layer.");
David Beck33f0ae02018-10-18 15:13:56 +01001049 reason.value() = "Unrecognised layer type";
telsoa014fcda012018-03-09 14:13:49 +00001050 result = false;
1051 break;
1052 }
1053 }
telsoa014fcda012018-03-09 14:13:49 +00001054 return result;
1055}
1056
David Beckdcb751f2018-10-03 11:42:42 +01001057bool IWorkloadFactory::IsLayerSupported(const IConnectableLayer& connectableLayer,
David Beck29c75de2018-10-23 13:35:58 +01001058 Optional<DataType> dataType,
telsoa01c577f2c2018-08-31 09:22:23 +01001059 std::string& outReasonIfUnsupported)
telsoa014fcda012018-03-09 14:13:49 +00001060{
David Beckdcb751f2018-10-03 11:42:42 +01001061 auto layer = boost::polymorphic_downcast<const Layer*>(&connectableLayer);
David Beck33f0ae02018-10-18 15:13:56 +01001062 return IsLayerSupported(layer->GetBackendId(), connectableLayer, dataType, outReasonIfUnsupported);
telsoa014fcda012018-03-09 14:13:49 +00001063}
1064
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001065// Default Implementations
Kevin May868eb142019-09-04 17:29:31 +01001066std::unique_ptr<IWorkload> IWorkloadFactory::CreateAbs(const AbsQueueDescriptor& descriptor,
1067 const WorkloadInfo& info) const
1068{
1069 return std::unique_ptr<IWorkload>();
1070}
1071
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001072std::unique_ptr<IWorkload> IWorkloadFactory::CreateActivation(const ActivationQueueDescriptor& descriptor,
1073 const WorkloadInfo& info) const
1074{
1075 return std::unique_ptr<IWorkload>();
1076}
1077
1078std::unique_ptr<IWorkload> IWorkloadFactory::CreateAddition(const AdditionQueueDescriptor& descriptor,
1079 const WorkloadInfo& info) const
1080{
1081 return std::unique_ptr<IWorkload>();
1082}
1083
Nikhil Rajee391d52019-09-05 17:50:44 +01001084std::unique_ptr<IWorkload> IWorkloadFactory::CreateArgMinMax(const ArgMinMaxQueueDescriptor& descriptor,
1085 const WorkloadInfo& info) const
1086{
1087 return std::unique_ptr<IWorkload>();
1088}
1089
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001090std::unique_ptr<IWorkload> IWorkloadFactory::CreateBatchNormalization(
1091 const BatchNormalizationQueueDescriptor& descriptor, const WorkloadInfo& info) const
1092{
1093 return std::unique_ptr<IWorkload>();
1094}
1095
1096std::unique_ptr<IWorkload> IWorkloadFactory::CreateBatchToSpaceNd(const BatchToSpaceNdQueueDescriptor& descriptor,
1097 const WorkloadInfo& Info) const
1098{
1099 return std::unique_ptr<IWorkload>();
1100}
1101
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001102std::unique_ptr<IWorkload> IWorkloadFactory::CreateComparison(const ComparisonQueueDescriptor& descriptor,
1103 const WorkloadInfo& info) const
1104{
1105 return std::unique_ptr<IWorkload>();
1106}
1107
Jim Flynne242f2d2019-05-22 14:24:13 +01001108std::unique_ptr<IWorkload> IWorkloadFactory::CreateConcat(const ConcatQueueDescriptor& descriptor,
Jim Flynn4ed6c832019-05-20 11:02:46 +01001109 const WorkloadInfo& info) const
1110{
1111 return std::unique_ptr<IWorkload>();
1112}
1113
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001114std::unique_ptr<IWorkload> IWorkloadFactory::CreateConstant(const ConstantQueueDescriptor& descriptor,
1115 const WorkloadInfo& info) const
1116{
1117 return std::unique_ptr<IWorkload>();
1118}
1119
1120std::unique_ptr<IWorkload> IWorkloadFactory::CreateConvertFp16ToFp32(const ConvertFp16ToFp32QueueDescriptor& descriptor,
1121 const WorkloadInfo& info) const
1122{
1123 return std::unique_ptr<IWorkload>();
1124}
1125
1126std::unique_ptr<IWorkload> IWorkloadFactory::CreateConvertFp32ToFp16(const ConvertFp32ToFp16QueueDescriptor& descriptor,
1127 const WorkloadInfo& info) const
1128{
1129 return std::unique_ptr<IWorkload>();
1130}
1131
1132std::unique_ptr<IWorkload> IWorkloadFactory::CreateConvolution2d(const Convolution2dQueueDescriptor& descriptor,
1133 const WorkloadInfo& info) const
1134{
1135 return std::unique_ptr<IWorkload>();
1136}
1137
1138std::unique_ptr<IWorkload> IWorkloadFactory::CreateDebug(const DebugQueueDescriptor& descriptor,
1139 const WorkloadInfo& info) const
1140{
1141 return std::unique_ptr<IWorkload>();
1142}
1143
Aron Virginas-Tardd6247f2019-09-19 14:31:17 +01001144std::unique_ptr<IWorkload> IWorkloadFactory::CreateDepthToSpace(const DepthToSpaceQueueDescriptor& descriptor,
1145 const WorkloadInfo& info) const
1146{
1147 return std::unique_ptr<IWorkload>();
1148}
1149
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001150std::unique_ptr<IWorkload> IWorkloadFactory::CreateDepthwiseConvolution2d(
1151 const DepthwiseConvolution2dQueueDescriptor& descriptor, const WorkloadInfo& info) const
1152{
1153 return std::unique_ptr<IWorkload>();
1154}
1155
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00001156std::unique_ptr<IWorkload> IWorkloadFactory::CreateDequantize(
1157 const DequantizeQueueDescriptor& descriptor, const WorkloadInfo& info) const
1158{
1159 return std::unique_ptr<IWorkload>();
1160}
1161
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001162std::unique_ptr<IWorkload> IWorkloadFactory::CreateDetectionPostProcess(
1163 const DetectionPostProcessQueueDescriptor& descriptor, const WorkloadInfo& info) const
1164{
1165 return std::unique_ptr<IWorkload>();
1166}
1167
1168std::unique_ptr<IWorkload> IWorkloadFactory::CreateDivision(const DivisionQueueDescriptor& descriptor,
1169 const WorkloadInfo& info) const
1170{
1171 return std::unique_ptr<IWorkload>();
1172}
1173
1174std::unique_ptr<IWorkload> IWorkloadFactory::CreateEqual(const EqualQueueDescriptor& descriptor,
1175 const WorkloadInfo& Info) const
1176{
1177 return std::unique_ptr<IWorkload>();
1178}
1179
1180std::unique_ptr<IWorkload> IWorkloadFactory::CreateFakeQuantization(const FakeQuantizationQueueDescriptor& descriptor,
1181 const WorkloadInfo& info) const
1182{
1183 return std::unique_ptr<IWorkload>();
1184}
1185
1186std::unique_ptr<IWorkload> IWorkloadFactory::CreateFloor(const FloorQueueDescriptor& descriptor,
1187 const WorkloadInfo& info) const
1188{
1189 return std::unique_ptr<IWorkload>();
1190}
1191
1192std::unique_ptr<IWorkload> IWorkloadFactory::CreateFullyConnected(const FullyConnectedQueueDescriptor& descriptor,
1193 const WorkloadInfo& info) const
1194{
1195 return std::unique_ptr<IWorkload>();
1196}
1197
1198std::unique_ptr<IWorkload> IWorkloadFactory::CreateGather(const GatherQueueDescriptor& descriptor,
1199 const WorkloadInfo& info) const
1200{
1201 return std::unique_ptr<IWorkload>();
1202}
1203
1204std::unique_ptr<IWorkload> IWorkloadFactory::CreateGreater(const GreaterQueueDescriptor& descriptor,
1205 const WorkloadInfo& info) const
1206{
1207 return std::unique_ptr<IWorkload>();
1208}
1209
Kevin Mayce5045a2019-10-02 14:07:47 +01001210std::unique_ptr<IWorkload> IWorkloadFactory::CreateInstanceNormalization(
1211 const InstanceNormalizationQueueDescriptor& descriptor,
1212 const WorkloadInfo& info) const
1213{
1214 return std::unique_ptr<IWorkload>();
1215}
1216
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001217std::unique_ptr<IWorkload> IWorkloadFactory::CreateL2Normalization(const L2NormalizationQueueDescriptor& descriptor,
1218 const WorkloadInfo& info) const
1219{
1220 return std::unique_ptr<IWorkload>();
1221}
1222
Aron Virginas-Tarf982dea2019-10-11 14:07:53 +01001223std::unique_ptr<IWorkload> IWorkloadFactory::CreateLogSoftmax(const LogSoftmaxQueueDescriptor& descriptor,
1224 const WorkloadInfo& info) const
1225{
1226 return std::unique_ptr<IWorkload>();
1227}
1228
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001229std::unique_ptr<IWorkload> IWorkloadFactory::CreateLstm(const LstmQueueDescriptor& descriptor,
1230 const WorkloadInfo& info) const
1231{
1232 return std::unique_ptr<IWorkload>();
1233}
1234
1235std::unique_ptr<IWorkload> IWorkloadFactory::CreateMaximum(const MaximumQueueDescriptor& descriptor,
1236 const WorkloadInfo& info) const
1237{
1238 return std::unique_ptr<IWorkload>();
1239}
1240
1241std::unique_ptr<IWorkload> IWorkloadFactory::CreateMean(const MeanQueueDescriptor& descriptor,
1242 const WorkloadInfo& Info) const
1243{
1244 return std::unique_ptr<IWorkload>();
1245}
1246
1247std::unique_ptr<IWorkload> IWorkloadFactory::CreateMemCopy(const MemCopyQueueDescriptor& descriptor,
1248 const WorkloadInfo& info) const
1249{
1250 return std::unique_ptr<IWorkload>();
1251}
1252
Derek Lambertif674aa02019-08-01 15:56:25 +01001253std::unique_ptr<IWorkload> IWorkloadFactory::CreateMemImport(const MemImportQueueDescriptor& descriptor,
1254 const WorkloadInfo& info) const
1255{
1256 return std::unique_ptr<IWorkload>();
1257}
1258
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01001259std::unique_ptr<IWorkload> IWorkloadFactory::CreateMerge(const MergeQueueDescriptor& descriptor,
1260 const WorkloadInfo& info) const
1261{
1262 return std::unique_ptr<IWorkload>();
1263}
1264
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001265std::unique_ptr<IWorkload> IWorkloadFactory::CreateMerger(const MergerQueueDescriptor& descriptor,
1266 const WorkloadInfo& info) const
1267{
1268 return std::unique_ptr<IWorkload>();
1269}
1270
1271std::unique_ptr<IWorkload> IWorkloadFactory::CreateMinimum(const MinimumQueueDescriptor& descriptor,
1272 const WorkloadInfo& info) const
1273{
1274 return std::unique_ptr<IWorkload>();
1275}
1276
1277std::unique_ptr<IWorkload> IWorkloadFactory::CreateMultiplication(const MultiplicationQueueDescriptor& descriptor,
1278 const WorkloadInfo& info) const
1279{
1280 return std::unique_ptr<IWorkload>();
1281}
1282
1283std::unique_ptr<IWorkload> IWorkloadFactory::CreateNormalization(const NormalizationQueueDescriptor& descriptor,
1284 const WorkloadInfo& info) const
1285{
1286 return std::unique_ptr<IWorkload>();
1287}
1288
1289std::unique_ptr<IWorkload> IWorkloadFactory::CreateOutput(const OutputQueueDescriptor& descriptor,
1290 const WorkloadInfo& info) const
1291{
1292 return std::unique_ptr<IWorkload>();
1293}
1294
1295std::unique_ptr<IWorkload> IWorkloadFactory::CreatePad(const PadQueueDescriptor& descriptor,
1296 const WorkloadInfo& Info) const
1297{
1298 return std::unique_ptr<IWorkload>();
1299}
1300
1301std::unique_ptr<IWorkload> IWorkloadFactory::CreatePermute(const PermuteQueueDescriptor& descriptor,
1302 const WorkloadInfo& info) const
1303{
1304 return std::unique_ptr<IWorkload>();
1305}
1306
1307std::unique_ptr<IWorkload> IWorkloadFactory::CreatePooling2d(const Pooling2dQueueDescriptor& descriptor,
1308 const WorkloadInfo& info) const
1309{
1310 return std::unique_ptr<IWorkload>();
1311}
1312
1313std::unique_ptr<IWorkload> IWorkloadFactory::CreatePreCompiled(const PreCompiledQueueDescriptor& descriptor,
1314 const WorkloadInfo& info) const
1315{
1316 return std::unique_ptr<IWorkload>();
1317}
1318
Matteo Martincigh0e406ee2019-06-12 15:42:18 +01001319std::unique_ptr<IWorkload> IWorkloadFactory::CreatePrelu(const PreluQueueDescriptor &descriptor,
1320 const WorkloadInfo &info) const
1321{
1322 return std::unique_ptr<IWorkload>();
1323}
1324
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001325std::unique_ptr<IWorkload> IWorkloadFactory::CreateQuantize(const QuantizeQueueDescriptor& descriptor,
1326 const WorkloadInfo& Info) const
1327{
1328 return std::unique_ptr<IWorkload>();
1329}
1330
James Conroyee18dc82019-07-17 11:27:46 +01001331std::unique_ptr<IWorkload> IWorkloadFactory::CreateQuantizedLstm(const QuantizedLstmQueueDescriptor& descriptor,
1332 const WorkloadInfo& info) const
1333{
1334 return std::unique_ptr<IWorkload>();
1335}
1336
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001337std::unique_ptr<IWorkload> IWorkloadFactory::CreateReshape(const ReshapeQueueDescriptor& descriptor,
1338 const WorkloadInfo& info) const
1339{
1340 return std::unique_ptr<IWorkload>();
1341}
1342
1343std::unique_ptr<IWorkload> IWorkloadFactory::CreateResizeBilinear(const ResizeBilinearQueueDescriptor& descriptor,
1344 const WorkloadInfo& info) const
1345{
1346 return std::unique_ptr<IWorkload>();
1347}
1348
Teresa Charlina9075df2019-06-27 15:41:57 +01001349std::unique_ptr<IWorkload> IWorkloadFactory::CreateResize(const ResizeQueueDescriptor& descriptor,
1350 const WorkloadInfo& info) const
1351{
1352 return std::unique_ptr<IWorkload>();
1353}
1354
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001355std::unique_ptr<IWorkload> IWorkloadFactory::CreateRsqrt(const RsqrtQueueDescriptor& descriptor,
1356 const WorkloadInfo& info) const
1357{
1358 return std::unique_ptr<IWorkload>();
1359}
1360
Aron Virginas-Tar636ab402019-09-16 14:27:45 +01001361std::unique_ptr<IWorkload> IWorkloadFactory::CreateSlice(const SliceQueueDescriptor& descriptor,
1362 const WorkloadInfo& info) const
1363{
1364 return std::unique_ptr<IWorkload>();
1365}
1366
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001367std::unique_ptr<IWorkload> IWorkloadFactory::CreateSoftmax(const SoftmaxQueueDescriptor& descriptor,
1368 const WorkloadInfo& info) const
1369{
1370 return std::unique_ptr<IWorkload>();
1371}
1372
1373std::unique_ptr<IWorkload> IWorkloadFactory::CreateSplitter(const SplitterQueueDescriptor& descriptor,
1374 const WorkloadInfo& info) const
1375{
1376 return std::unique_ptr<IWorkload>();
1377}
1378
1379std::unique_ptr<IWorkload> IWorkloadFactory::CreateSpaceToBatchNd(const SpaceToBatchNdQueueDescriptor& descriptor,
1380 const WorkloadInfo& info) const
1381{
1382 return std::unique_ptr<IWorkload>();
1383}
1384
Aron Virginas-Tar972af152019-06-11 14:14:03 +01001385std::unique_ptr<IWorkload> IWorkloadFactory::CreateSpaceToDepth(const SpaceToDepthQueueDescriptor& descriptor,
1386 const WorkloadInfo& info) const
1387{
1388 return std::unique_ptr<IWorkload>();
1389}
1390
Matthew Jackson2b8c1da2019-07-04 14:59:16 +01001391std::unique_ptr<IWorkload> IWorkloadFactory::CreateStack(const StackQueueDescriptor& descriptor,
1392 const WorkloadInfo& info) const
1393{
1394 return std::unique_ptr<IWorkload>();
1395}
1396
Derek Lambertia9cca6a2019-03-25 15:41:58 +00001397std::unique_ptr<IWorkload> IWorkloadFactory::CreateStridedSlice(const StridedSliceQueueDescriptor& descriptor,
1398 const WorkloadInfo& Info) const
1399{
1400 return std::unique_ptr<IWorkload>();
1401}
1402
1403std::unique_ptr<IWorkload> IWorkloadFactory::CreateSubtraction(const SubtractionQueueDescriptor& descriptor,
1404 const WorkloadInfo& info) const
1405{
1406 return std::unique_ptr<IWorkload>();
1407}
1408
Sadik Armaganeff363d2019-04-05 15:25:46 +01001409std::unique_ptr<IWorkload> IWorkloadFactory::CreateSwitch(const SwitchQueueDescriptor& descriptor,
1410 const WorkloadInfo& info) const
1411{
1412 return std::unique_ptr<IWorkload>();
1413}
1414
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01001415std::unique_ptr<IWorkload> IWorkloadFactory::CreateTransposeConvolution2d(
1416 const TransposeConvolution2dQueueDescriptor& descriptor,
1417 const WorkloadInfo& info) const
1418{
1419 return std::unique_ptr<IWorkload>();
surmeh013537c2c2018-05-18 16:31:43 +01001420}
Aron Virginas-Tar639fb042019-06-20 14:28:19 +01001421
} // namespace armnn